climate-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From lewi...@apache.org
Subject [1/2] climate git commit: CLIMATE-885 Upgrade to Podaacpy 1.9.0
Date Thu, 20 Apr 2017 18:12:29 GMT
Repository: climate
Updated Branches:
  refs/heads/master f0b28c7d6 -> c656b3b46


CLIMATE-885 Upgrade to Podaacpy 1.9.0


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/44402fb0
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/44402fb0
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/44402fb0

Branch: refs/heads/master
Commit: 44402fb0c6184057eb565a6c0b300f5d406501a9
Parents: f0b28c7
Author: Lewis John McGibbney <lewis.mcgibbney@gmail.com>
Authored: Thu Apr 20 01:03:25 2017 -0700
Committer: Lewis John McGibbney <lewis.mcgibbney@gmail.com>
Committed: Thu Apr 20 01:03:25 2017 -0700

----------------------------------------------------------------------
 .gitignore                             |  5 +++
 examples/podaac_integration_example.py |  6 +--
 examples/simple_model_tstd.py          | 16 ++++----
 ocw/data_source/local.py               | 10 ++---
 ocw/data_source/podaac_datasource.py   | 63 +++++++++++++++++++----------
 ocw/dataset_loader.py                  |  2 +-
 ocw/tests/subset.json                  |  6 +--
 ocw/tests/test_podaac.py               | 31 +++++++++-----
 8 files changed, 86 insertions(+), 53 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index a5062d7..00e1d24 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,8 @@ examples/*.png
 build/
 docs/build/
 Apache_Open_Climate_Workbench.egg-info/
+.project
+.pydevproject
+dist/
+doc.txt
+ocw.egg-info/

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/examples/podaac_integration_example.py
----------------------------------------------------------------------
diff --git a/examples/podaac_integration_example.py b/examples/podaac_integration_example.py
index a06fc94..7b8bb10 100644
--- a/examples/podaac_integration_example.py
+++ b/examples/podaac_integration_example.py
@@ -25,9 +25,9 @@ variable = 'uwnd'
 name = 'PO.DAAC_test_dataset'
 OUTPUT_PLOT = "ccmp_temporal_std"
 """ Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-print("Loading %s dataset short name into a OCW dataset object." % datasetId)
-ccmp_dataset = podaac.load_level4_granule(
-    variable=variable, datasetId=datasetId, name=name)
+print("Extracting Level4 granule %s and converting it into a OCW dataset object." % datasetId)
+ccmp_dataset = podaac.extract_l4_granule(
+    variable=variable, dataset_id=datasetId, name=name)
 print("CCMP_Dataset.values shape: (times, lats, lons) - %s \n" %
       (ccmp_dataset.values.shape,))
 

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/examples/simple_model_tstd.py
----------------------------------------------------------------------
diff --git a/examples/simple_model_tstd.py b/examples/simple_model_tstd.py
index 79c19d2..fb3ce48 100644
--- a/examples/simple_model_tstd.py
+++ b/examples/simple_model_tstd.py
@@ -38,11 +38,11 @@ else:
     urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
 
 """ Step 1: Load Local NetCDF File into OCW Dataset Objects """
-print "Loading %s into an OCW Dataset Object" % (FILE_1,)
+print("Loading %s into an OCW Dataset Object" % (FILE_1,))
 # 'tasmax' is variable name of values
 knmi_dataset = local.load_file(FILE_1, "tasmax")
 
-print "KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,)
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
 
 # Acessing latittudes and longitudes of netCDF file
 lats = knmi_dataset.lats
@@ -50,7 +50,7 @@ lons = knmi_dataset.lons
 
 """ Step 2:  Build a Metric to use for Evaluation - Temporal STD for this example """
 # You can build your own metrics, but OCW also ships with some common metrics
-print "Setting up a Temporal STD metric to use for evaluation"
+print("Setting up a Temporal STD metric to use for evaluation")
 std = metrics.TemporalStdDev()
 
 """ Step 3: Create an Evaluation Object using Datasets and our Metric """
@@ -58,11 +58,11 @@ std = metrics.TemporalStdDev()
 # Evaluation(reference, targets, metrics, subregions=None)
 # Evaluation can take in multiple targets and metrics, so we need to convert
 # our examples into Python lists.  Evaluation will iterate over the lists
-print "Making the Evaluation definition"
+print("Making the Evaluation definition")
 # Temporal STD Metric gets one target dataset then reference dataset
 # should be None
 std_evaluation = evaluation.Evaluation(None, [knmi_dataset], [std])
-print "Executing the Evaluation using the object's run() method"
+print("Executing the Evaluation using the object's run() method")
 std_evaluation.run()
 
 """ Step 4: Make a Plot from the Evaluation.results """
@@ -73,12 +73,12 @@ std_evaluation.run()
 # The shape of results is (num_metrics, num_target_datasets) if no subregion
 # Accessing the actual results when we have used 1 metric and 1 dataset is
 # done this way:
-print "Accessing the Results of the Evaluation run"
+print("Accessing the Results of the Evaluation run")
 results = std_evaluation.unary_results[0][0]
-print "The results are of type: %s" % type(results)
+print("The results are of type: %s" % type(results))
 
 # From the temporal std output I want to make a Contour Map of the region
-print "Generating a contour map using ocw.plotter.draw_contour_map()"
+print("Generating a contour map using ocw.plotter.draw_contour_map()")
 
 fname = OUTPUT_PLOT
 gridshape = (4, 5)  # 20 Years worth of plots. 20 rows in 1 column

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/ocw/data_source/local.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/local.py b/ocw/data_source/local.py
index b494d84..adc89b6 100644
--- a/ocw/data_source/local.py
+++ b/ocw/data_source/local.py
@@ -457,14 +457,11 @@ def load_dataset_from_multiple_netcdf_files(variable_name, variable_unit=None,
         (an observation or a model) into a Dataset. \
     The dataset can be spatially subset.
 
-    :param filelist: A text file including a list of filenames
-    :type filelist: :mod:`string`
-
     :param variable_name: The variable name to load from the NetCDF file.
     :type variable_name: :mod:`string`
 
-    :param variable_name: The variable's unit to load from the NetCDF file.
-    :type variable_name: :mod:`string`
+    :param variable_unit: The variable's unit to load from the NetCDF file.
+    :type variable_unit: :mod:`string`
 
     :param lat_name: (Optional) The latitude variable name to extract from the \
         dataset.
@@ -481,6 +478,9 @@ def load_dataset_from_multiple_netcdf_files(variable_name, variable_unit=None,
     :param name: (Optional) A name for the loaded dataset.
     :type name: :mod:`string`
 
+    :param file_list: A text file including a list of filenames
+    :type file_list: :mod:`string`
+
     :param file_path: Directory to the NetCDF file to load.
     :type file_path: :mod:`string`
 

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/ocw/data_source/podaac_datasource.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/podaac_datasource.py b/ocw/data_source/podaac_datasource.py
index 6feb02b..708b72a 100644
--- a/ocw/data_source/podaac_datasource.py
+++ b/ocw/data_source/podaac_datasource.py
@@ -22,6 +22,9 @@ from ocw.dataset import Dataset
 from netCDF4 import Dataset as netcdf_dataset
 from netcdftime import utime
 import os
+import sys
+import time
+import itertools
 
 
 def convert_times_to_datetime(time):
@@ -53,13 +56,27 @@ def list_available_extract_granule_dataset_ids():
     podaac_utils = PodaacUtils()
     return podaac_utils.list_available_extract_granule_dataset_ids()
 
-def subset_granule(input_file_path=''):
+def subset_granule(variable, dataset_id='', name='', path='/tmp', input_file_path=''):
     '''Subset Granule service allows users to Submit subset jobs. \
         Use of this service should be preceded by a Granule Search in \
         order to identify and generate a list of granules to be subsetted.
 
+    :param variable: The name of the variable to read from the dataset.
+    :type variable: :mod:`string`
+
+    :param dataset_id: dataset persistent ID. datasetId or \
+        shortName is required for a granule search. Example: \
+        PODAAC-ASOP2-25X01
+    :type dataset_id: :mod:`string`
+
+    :param name: (Optional) A name for the loaded dataset.
+    :type name: :mod:`string`
+
+    :param path: (Optional) a path on the filesystem to store the granule.
+    :type path: :mod:`string`
+
    :param input_file_path: path to a json file which contains \
-        the request that you want to send to PO.DAAC
+        the subset request that you want to send to PO.DAAC
     :type input_file_path: :mod:`string`
 
     :returns: a token on successful request reception. This can be \
@@ -67,41 +84,43 @@ def subset_granule(input_file_path=''):
 
     '''
     podaac = Podaac()
-    status = podaac.subset_status(podaac.granule_subset(input_file_path))
-    print("Granule subsetting initiated with request tracking token '%s'." % status)
-    while status is not "done":
-        print('...')
-    return status
+    if path is not None:
+        path = os.path.dirname(os.path.abspath(__file__))
+    granule_name = podaac.granule_subset(input_file_path, path)
+    path = path + '/' + granule_name
+    return read_dataset(name, granule_name, variable, path)
 
-def load_level4_granule(variable, datasetId='', name=''):
+def extract_l4_granule(variable, dataset_id='', name='', path='/tmp'):
     '''Loads a Level4 gridded Dataset from PODAAC
     :param variable: The name of the variable to read from the dataset.
     :type variable: :mod:`string`
 
-    :param datasetId: dataset persistent ID. datasetId or \
+    :param dataset_id: dataset persistent ID. datasetId or \
         shortName is required for a granule search. Example: \
         PODAAC-ASOP2-25X01
-    :type datasetId: :mod:`string`
-
-    :param shortName: the shorter name for a dataset. \
-        Either shortName or datasetId is required for a \
-        granule search. Example: ASCATA-L2-25km
-    :type shortName: :mod:`string`
+    :type dataset_id: :mod:`string`
 
     :param name: (Optional) A name for the loaded dataset.
     :type name: :mod:`string`
 
+    :param path: a path on the filesystem to store the granule.
+    :type path: :mod:`string`
+
     :returns: A :class:`dataset.Dataset` containing the dataset pointed to by
         the OpenDAP URL.
 
     :raises: ServerError
     '''
-    # Downloading the dataset using podaac toolkit
     podaac = Podaac()
-    path = os.path.dirname(os.path.abspath(__file__))
-    granuleName = podaac.extract_l4_granule(
-        dataset_id=datasetId, path=path)
-    path = path + '/' + granuleName
+    if path is not None:
+        path = os.path.dirname(os.path.abspath(__file__))
+    granule_name = podaac.extract_l4_granule(
+        dataset_id=dataset_id, path=path)
+    print("Downloaded Level4 Granule '%s' to %s" % (granule_name, path))
+    path = path + '/' + granule_name
+    return read_dataset(name, granule_name, variable, path)
+
+def read_dataset(name='', granule_name ='', variable=None, path='/tmp'):
     d = netcdf_dataset(path, mode='r')
     dataset = d.variables[variable]
 
@@ -129,13 +148,13 @@ def load_level4_granule(variable, datasetId='', name=''):
     values = np.array(dataset[:])
     origin = {
         'source': 'PO.DAAC',
-        'url': 'podaac.jpl.nasa.gov/ws'
+        'url': 'podaac.jpl.nasa.gov'
     }
 
     # Removing the downloaded temporary granule before creating the OCW
     # dataset.
     d.close()
-    path = os.path.join(os.path.dirname(__file__), granuleName)
+    path = os.path.join(os.path.dirname(__file__), granule_name)
     os.remove(path)
 
     return Dataset(lats, lons, times, values, variable, name=name, origin=origin)

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/ocw/dataset_loader.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_loader.py b/ocw/dataset_loader.py
index 5a7f6ea..4b2a925 100644
--- a/ocw/dataset_loader.py
+++ b/ocw/dataset_loader.py
@@ -94,7 +94,7 @@ class DatasetLoader:
             'local': local.load_multiple_files,
             'local_split': local.load_dataset_from_multiple_netcdf_files,
             'rcmed': rcmed.parameter_dataset,
-            'podaac': podaac.load_level4_granule
+            'podaac': podaac.extract_l4_granule
         }
         
         # Exclude esgf and dap for python 3 until they are compatible

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/ocw/tests/subset.json
----------------------------------------------------------------------
diff --git a/ocw/tests/subset.json b/ocw/tests/subset.json
index c00891a..a3578f3 100644
--- a/ocw/tests/subset.json
+++ b/ocw/tests/subset.json
@@ -2,10 +2,10 @@
     "email":"yournamem@emailaddress.com",
     "query":[
         {
-            "datasetId":"PODAAC-ASOP2-25X01",
-            "granuleIds":["ascat_20160409_113000_metopa_49153_eps_o_250_2401_ovw.l2.nc"],
+            "datasetId":"PODAAC-GHRAM-4FA01",
+            "granuleIds":["20170418-ABOM-L4HRfnd-AUS-v01-fv01_0-RAMSSA_09km.nc"],
             "bbox":"-180,-90,180,90",
-            "variables":["wvc_index","model_speed","model_dir","ice_prob","ice_age","wvc_quality_flag","wind_speed","wind_dir","bs_distance","lat","lon","time"],
+            "variables":["analysed_sst", "lat","lon","time"],
             "compact":false
         }
     ]

http://git-wip-us.apache.org/repos/asf/climate/blob/44402fb0/ocw/tests/test_podaac.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_podaac.py b/ocw/tests/test_podaac.py
index 89bc143..d253192 100644
--- a/ocw/tests/test_podaac.py
+++ b/ocw/tests/test_podaac.py
@@ -32,45 +32,54 @@ class TestPodaacDataSource(unittest.TestCase):
         cls.name = 'PO.DAAC_test_dataset'
         cls.file_path = os.path.dirname(os.path.abspath(__file__))
         cls.format = '.nc'
-        cls.dataset = podaac.load_level4_granule(
-            cls.variable, cls.datasetId, cls.name)
-        #Until we can retrieve the subset data download link programmatically,
-        #we will need to skip this test. More information can be see at 
-        #https://podaac.jpl.nasa.gov/forum/viewtopic.php?f=53&t=424&p=790
-        #cls.json = 'subset.json'
-        #cls.granule_subset = podaac.subset_granule(cls.json)
+        cls.dataset = podaac.extract_l4_granule(cls.variable, cls.datasetId, cls.name)
+        cls.json = 'subset.json'
+        cls.subset_datasetId = 'PODAAC-GHRAM-4FA01'
+        cls.subset_variable = 'analysed_sst'
+        cls.subset_name = 'GHRSST Level 4 RAMSSA Australian Regional Foundation Sea Surface Temperature Analysis'
+        cls.granule_subset = podaac.subset_granule(
+            cls.subset_variable,
+            cls.subset_datasetId,
+            name=cls.subset_name,
+            input_file_path=cls.json)
 
     def test_is_dataset(self):
+        print('in test_is_dataset')
         self.assertTrue(isinstance(self.dataset, Dataset))
 
     def test_dataset_lats(self):
+        print('in test_dataset_lats')
         self.assertEquals(len(self.dataset.lats), 901)
 
     def test_dataset_lons(self):
+        print('in test_dataset_lons')
         self.assertEquals(len(self.dataset.lons), 1800)
 
     def test_dataset_times(self):
+        print('in test_dataset_times')
         self.assertEquals(len(self.dataset.times), 1)
 
     def test_dataset_values(self):
+        print('in test_dataset_values')
         self.assertEquals(len(self.dataset.values), 1)
 
     def test_valid_date_conversion(self):
+        print('in test_valid_date_conversion')
         start = dt.datetime(1991, 9, 2, 12)
         self.assertTrue(start == self.dataset.times[0])
 
     def test_dataset_origin(self):
+        print('in test_dataset_origin')
         self.assertEquals(self.dataset.origin['source'], 'PO.DAAC')
         self.assertEquals(self.dataset.origin['url'], 'podaac.jpl.nasa.gov/ws')
 
     def test_custom_name(self):
+        print('in test_custom_name')
         self.assertEquals(self.dataset.name, self.name)
 
     def test_granule_subset(self):
-        #Until we can retrieve the subset data download link programmatically,
-        #we will need to skip this test. More information can be see at 
-        #https://podaac.jpl.nasa.gov/forum/viewtopic.php?f=53&t=424&p=790
-        pass
+        print('in test_granule_subset')
+        self.assertEquals(self.granule_subset.name, self.subset_name)
 
 if __name__ == '__main__':
     unittest.main()


Mime
View raw message