climate-commits mailing list archives

From huiky...@apache.org
Subject [01/12] climate git commit: CLIMATE-467: Replace the existing time decoding function with netCDF4 modules.
Date Wed, 15 Jul 2015 05:08:14 GMT
Repository: climate
Updated Branches:
  refs/heads/master 6b24b8d18 -> 751c7a418


CLIMATE-467: Replace the existing time decoding function with netCDF4 modules.


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/cfb120e5
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/cfb120e5
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/cfb120e5

Branch: refs/heads/master
Commit: cfb120e5eaea004e850b884e49d41f8a0c269b75
Parents: 8469bca
Author: Huikyo Lee <huikyole@huikyole-2481551.rn.jpl.net>
Authored: Sat Aug 9 21:48:19 2014 -0700
Committer: Kyo <kyo@Kyo.local>
Committed: Thu Sep 11 14:54:10 2014 -0700

----------------------------------------------------------------------
 ocw/dataset_processor.py | 72 +++++++++++++++++++++++++++++++++++++++++++
 ocw/utils.py             | 44 +++++++-------------------
 2 files changed, 83 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/cfb120e5/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
old mode 100644
new mode 100755
index c9f0196..e57263b
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -29,6 +29,47 @@ import logging
 
 logger = logging.getLogger(__name__)
 
+def temporal_subset(target_dataset, month_index):
+    """ Temporally subset data between month_begin and month_end in each year.
+       For example, to extract DJF time series, monthBegin = 12, monthEnd =2
+       This can handle monthBegin=monthEnd i.e. for time series of a specific month
+
+    :param target_dataset: Dataset object that needs temporal subsetting 
+    :type target_dataset: Open Climate Workbench Dataset Object
+    :param month_index: an integer array of subset months (December ~ February: [12,1,2])
+    :type temporal_resolution: interger array
+
+    :returns: A temporal subset OCW Dataset
+    :rtype: Open Climate Workbench Dataset Object
+    """
+
+    dates = target_dataset.times
+    months = []
+    for date in dates:
+        months.append(date.month) 
+    months = np.array(months)
+    time_index = []
+    for m_value in month_index:
+        time_index = np.append(time_index, np.where(months == m_value)[0]) 
+        if m_value == month_index[0]:
+            time_index_first = np.min(np.where(months == m_value)[0])
+        if m_value == month_index[-1]:
+            time_index_last = np.max(np.where(months == m_value)[0])
+
+    time_index = np.sort(time_index)
+  
+    time_index = time_index[np.where((time_index >= time_index_first) & (time_index
<= time_index_last))]
+
+    time_index = list(time_index)
+    
+    new_dataset = ds.Dataset(target_dataset.lats,
+                             target_dataset.lons,
+                             target_dataset.times[time_index],
+                             target_dataset.values[time_index,:],
+                             target_dataset.variable,
+                             target_dataset.name) 
+    return new_dataset
+    
 def temporal_rebin(target_dataset, temporal_resolution):
     """ Rebin a Dataset to a new temporal resolution
     
@@ -65,6 +106,37 @@ def temporal_rebin(target_dataset, temporal_resolution):
     
     return new_dataset
 
+def spatial_aggregation(target_dataset, lon_min, lon_max, lat_min, lat_max):
+    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space,
grid_lon+grid_space
+
+    :param target_dataset: Dataset object that needs spatial subsetting
+    :type target_dataset: Open Climate Workbench Dataset Object
+    :param lon_min: minimum longitude (western boundary)
+    :type lon_min: float
+    :param lon_max: maximum longitude (eastern boundary)
+    :type lon_max: float
+    :param lat_min: minimum latitude (southern boundary)
+    :type lat_min: float
+    :param lat_max: maximum latitude (northern boundary)
+    :type lat_max: float
+
+    :returns: A new spatially subset Dataset
+    :rtype: Open Climate Workbench Dataset Object
+    """
+
+    new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
+    y_index, x_index = np.where((new_lon >= lon_min) & (new_lon <= lon_max) & (new_lat >= lat_min) & (new_lat <= lat_max))[0:2]
+
+    new_dataset = ds.Dataset(target_dataset.lats[y_index.min():y_index.max()+1],
+                             target_dataset.lons[x_index.min():x_index.max()+1],
+                             target_dataset.times,
+                             target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1],
+                             target_dataset.variable,
+                             target_dataset.name) 
+    return new_dataset 
+     
+
+
 def spatial_regrid(target_dataset, new_latitudes, new_longitudes):
     """ Regrid a Dataset using the new latitudes and longitudes
 

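For orientation, here is a minimal usage sketch of the two helpers added above. The loader call, file name, variable name, and bounding box are illustrative assumptions, not part of this commit.

    # Hypothetical usage of the new dataset_processor helpers
    import ocw.data_source.local as local
    import ocw.dataset_processor as dsp

    # load_file is assumed to return an OCW Dataset for the named variable
    dataset = local.load_file('example.nc', 'tasmax')

    # Keep only December-February values from each year
    djf = dsp.temporal_subset(dataset, [12, 1, 2])

    # Subset to a longitude/latitude box: lon_min, lon_max, lat_min, lat_max
    regional = dsp.spatial_aggregation(djf, -125.0, -66.0, 24.0, 50.0)
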
http://git-wip-us.apache.org/repos/asf/climate/blob/cfb120e5/ocw/utils.py
----------------------------------------------------------------------
diff --git a/ocw/utils.py b/ocw/utils.py
old mode 100644
new mode 100755
index 8c109b7..cbdf85c
--- a/ocw/utils.py
+++ b/ocw/utils.py
@@ -23,6 +23,7 @@ import numpy as np
 
 from mpl_toolkits.basemap import shiftgrid
 from dateutil.relativedelta import relativedelta
+from netCDF4 import num2date
 
 def decode_time_values(dataset, time_var_name):
     ''' Decode NetCDF time values into Python datetime objects.
@@ -52,10 +53,17 @@ def decode_time_values(dataset, time_var_name):
         for time_val in time_data:
             times.append(time_base + relativedelta(months=int(time_val)))
     else:
-        for time_val in time_data:
-            arg[time_units] = time_val
-            times.append(time_base + dt.timedelta(**arg))
+        # Decode the remaining unit types with netCDF4's num2date instead of
+        # building datetime.timedelta offsets by hand, so the time variable's
+        # calendar attribute is respected when it is present.
+
+        times_calendar = 'standard'
+        try:
+            times_calendar = time_data.calendar
+        except AttributeError:
+            pass
 
+        times = num2date(time_data, units=time_format, calendar=times_calendar)
     return times
 
 def parse_time_units(time_format):
@@ -281,33 +289,3 @@ def calc_climatology_year(dataset):
 
     return annually_mean, total_mean
 
-def calc_climatology_season(month_start, month_end, dataset):
-    ''' Calculate seasonal mean and time series for given months.
-
-    :param month_start: An integer for beginning month (Jan=1)
-    :type month_start: Integer
-    :param month_end: An integer for ending month (Jan=1)
-    :type month_end: Integer
-    :param dataset: Dataset object with full-year format
-    :type dataset: ocw.dataset.Dataset object
-
-    :returns:  
-        t_series - monthly average over the given season
-        means - mean over the entire season
-    :rtype: A tuple of two numpy arrays
-    '''
-
-    if month_start > month_end:
-        # Offset the original array so that the the first month
-        # becomes month_start, note that this cuts off the first year of data
-        offset = slice(month_start - 1, month_start - 13)
-        reshape_data = reshape_monthly_to_annually(dataset[offset])
-        month_index = slice(0, 13 - month_start + month_end)
-    else:
-        # Since month_start <= month_end, just take a slice containing those months
-        reshape_data = reshape_monthly_to_annually(dataset)
-        month_index = slice(month_start - 1, month_end)
-    
-    t_series = reshape_data[:, month_index].mean(axis=1)
-    means = t_series.mean(axis=0)
-    return t_series, means
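
As a standalone illustration of the netCDF4-based decoding this commit switches to, a minimal sketch follows; the file name and the 'time' variable name are assumptions.

    # Sketch of decoding NetCDF time values with netCDF4.num2date
    from netCDF4 import Dataset, num2date

    nc = Dataset('example.nc')
    time_var = nc.variables['time']

    # Fall back to the standard calendar when the variable defines none,
    # mirroring the try/except added in decode_time_values above
    calendar = getattr(time_var, 'calendar', 'standard')
    times = num2date(time_var[:], units=time_var.units, calendar=calendar)
    print(times[0], times[-1])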

