climate-commits mailing list archives

From jo...@apache.org
Subject svn commit: r1570445 - in /incubator/climate/trunk/ocw-ui/backend: processing.py tests/test_processing.py
Date Fri, 21 Feb 2014 04:37:40 GMT
Author: joyce
Date: Fri Feb 21 04:37:38 2014
New Revision: 1570445

URL: http://svn.apache.org/r1570445
Log:
CLIMATE-332 - Add binary metric plot title creation and test

Modified:
    incubator/climate/trunk/ocw-ui/backend/processing.py
    incubator/climate/trunk/ocw-ui/backend/tests/test_processing.py

Modified: incubator/climate/trunk/ocw-ui/backend/processing.py
URL: http://svn.apache.org/viewvc/incubator/climate/trunk/ocw-ui/backend/processing.py?rev=1570445&r1=1570444&r2=1570445&view=diff
==============================================================================
--- incubator/climate/trunk/ocw-ui/backend/processing.py (original)
+++ incubator/climate/trunk/ocw-ui/backend/processing.py Fri Feb 21 04:37:38 2014
@@ -478,7 +478,7 @@ def _generate_binary_eval_plot_title(eva
     :returns: The plot title for the requested metric run.
     '''
     return "{} of {} compared to {}".format(
-        evaluation.metrics[metric_index].__class__.__name__.lower(),
+        evaluation.metrics[metric_index].__class__.__name__,
         evaluation.ref_dataset.name,
         evaluation.target_datasets[dataset_index].name
     )
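For reference, a minimal sketch (not the project code) of how the title now reads with the class name left un-lowercased; the helper's argument order (dataset index before metric index) is an assumption inferred from the new test below, since the signature is truncated in the hunk header.

    # Hypothetical stand-ins for the OCW objects, just to show the format string.
    class Bias(object):
        pass

    class TemporalStdDev(object):
        pass

    class FakeDataset(object):
        def __init__(self, name):
            self.name = name

    class FakeEvaluation(object):
        def __init__(self, ref_dataset, target_datasets, metrics):
            self.ref_dataset = ref_dataset
            self.target_datasets = target_datasets
            self.metrics = metrics

    def generate_binary_eval_plot_title(evaluation, dataset_index, metric_index):
        # Same format string as processing.py after r1570445; the parameter
        # order here is assumed from the new test case.
        return "{} of {} compared to {}".format(
            evaluation.metrics[metric_index].__class__.__name__,
            evaluation.ref_dataset.name,
            evaluation.target_datasets[dataset_index].name
        )

    evaluation = FakeEvaluation(
        FakeDataset('Ref'),
        [FakeDataset('T1'), FakeDataset('T2')],
        [TemporalStdDev(), Bias()]
    )
    print(generate_binary_eval_plot_title(evaluation, 0, 1))
    # -> Bias of Ref compared to T1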

Modified: incubator/climate/trunk/ocw-ui/backend/tests/test_processing.py
URL: http://svn.apache.org/viewvc/incubator/climate/trunk/ocw-ui/backend/tests/test_processing.py?rev=1570445&r1=1570444&r2=1570445&view=diff
==============================================================================
--- incubator/climate/trunk/ocw-ui/backend/tests/test_processing.py (original)
+++ incubator/climate/trunk/ocw-ui/backend/tests/test_processing.py Fri Feb 21 04:37:38 2014
@@ -155,6 +155,26 @@ class TestFilePathCreation(unittest.Test
             '/tmp/ocw/t2_temporalstddev'
         )
 
+class TestPlotTitleCreation(unittest.TestCase):
+    def setUp(self):
+        self.full_evaluation = Evaluation(
+            _create_fake_dataset('Ref'),
+            [_create_fake_dataset('T1'), _create_fake_dataset('T2')],
+            [metrics.TemporalStdDev(), metrics.Bias(), metrics.Bias()]
+        )
+
+        self.unary_evaluation = Evaluation(
+            None,
+            [_create_fake_dataset('T1'), _create_fake_dataset('T2')],
+            [metrics.TemporalStdDev()]
+        )
+
+    def test_binary_plot_title_generation(self):
+        self.assertEquals(
+            bp._generate_binary_eval_plot_title(self.full_evaluation, 0, 1),
+            'Bias of Ref compared to T1'
+        )
+
 def _create_fake_dataset(name):
     lats = numpy.array(range(-10, 25, 1))
     lons = numpy.array(range(-30, 40, 1))
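To exercise only the new test case, something along these lines should work; the import path below assumes the interpreter is started from incubator/climate/trunk/ocw-ui/backend, which may not match every checkout.

    # Run just TestPlotTitleCreation from the backend test suite.
    import unittest
    from tests.test_processing import TestPlotTitleCreation

    suite = unittest.TestLoader().loadTestsFromTestCase(TestPlotTitleCreation)
    unittest.TextTestRunner(verbosity=2).run(suite)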


