climate-commits mailing list archives

From huiky...@apache.org
Subject [1/4] climate git commit: initial commit
Date Fri, 25 Sep 2015 15:57:28 GMT
Repository: climate
Updated Branches:
  refs/heads/master 66413cf0f -> e78ac8eee


initial commit


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/4be65b18
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/4be65b18
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/4be65b18

Branch: refs/heads/master
Commit: 4be65b18e4b26f0bd29dd4824310c6fa6a921554
Parents: 7f34fc3
Author: huikyole <huikyole@argo.jpl.nasa.gov>
Authored: Sun Sep 20 22:04:31 2015 -0700
Committer: huikyole <huikyole@argo.jpl.nasa.gov>
Committed: Sun Sep 20 22:04:31 2015 -0700

----------------------------------------------------------------------
 examples/cordex-AF_tasmax_bias_to_cru.yaml      |  46 ++++
 examples/example_package.py                     | 219 +++++++++++++++++++
 .../narccap_prec_JJA_taylor_diagram_to_cru.yaml |  44 ++++
 examples/run_RCMES.py                           | 213 ++++++++++++++++++
 ocw/data_source/local.py                        |  12 +-
 5 files changed, 526 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/4be65b18/examples/cordex-AF_tasmax_bias_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/cordex-AF_tasmax_bias_to_cru.yaml b/examples/cordex-AF_tasmax_bias_to_cru.yaml
new file mode 100644
index 0000000..042a9a3
--- /dev/null
+++ b/examples/cordex-AF_tasmax_bias_to_cru.yaml
@@ -0,0 +1,46 @@
+workdir: ./
+output_netcdf_filename: cordex-AF_CRU_tasmax_monthly_1990-2007.nc
+
+# RCMES temporally subsets the data between month_start and month_end. If average_each_year is True, the seasonal mean in each year is calculated and used for the metrics calculation; if False, it is not.
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: -45.76
+    max_lat: 42.24
+    min_lon: -24.64
+    max_lon: 60.28
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ./data/AFRICA*tasmax.nc
+        variable: tasmax
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-AF_tasmax_annual_mean_bias_to_cru
+    subplots_array: !!python/tuple [3,4] 
+
+use_subregions: False
+
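
For context, this configuration is consumed by examples/run_RCMES.py (added below in this commit) via PyYAML. A minimal sketch of loading it and reading the time block, assuming it is run from the examples/ directory:

    import yaml

    # yaml.load mirrors run_RCMES.py below; the !!python/tuple tag under
    # plots1 is resolved by PyYAML's default (full) loader.
    config = yaml.load(open('cordex-AF_tasmax_bias_to_cru.yaml'))

    time_info = config['time']
    print 'temporal resolution:', time_info['temporal_resolution']  # monthly
    print 'months used:', time_info['month_start'], '-', time_info['month_end']  # 1 - 12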

http://git-wip-us.apache.org/repos/asf/climate/blob/4be65b18/examples/example_package.py
----------------------------------------------------------------------
diff --git a/examples/example_package.py b/examples/example_package.py
new file mode 100644
index 0000000..b564da5
--- /dev/null
+++ b/examples/example_package.py
@@ -0,0 +1,219 @@
+# Apache OCW lib imports
+import ocw.data_source.local as local
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.evaluation import Evaluation
+import ocw.metrics as metrics
+
+# Python libraries
+import numpy as np
+import numpy.ma as ma
+import matplotlib.pyplot as plt
+from mpl_toolkits.basemap import Basemap 
+from matplotlib import rcParams
+from matplotlib.patches import Polygon
+import string
+
+def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name, row, column):
+    '''Draw maps of observed multi-year climatology and biases of models'''
+
+    # calculate climatology of observation data
+    obs_clim = utils.calc_temporal_mean(obs_dataset)
+    # determine the metrics
+    map_of_bias = metrics.TemporalMeanBias()
+
+    # create the Evaluation object
+    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
+                                 model_datasets, # list of target datasets for the evaluation
+                                 [map_of_bias])
+
+    # run the evaluation (bias calculation)
+    bias_evaluation.run() 
+
+    rcm_bias = bias_evaluation.results[0]
+
+    fig = plt.figure()
+
+    lat_min = obs_dataset.lats.min()
+    lat_max = obs_dataset.lats.max()
+    lon_min = obs_dataset.lons.min()
+    lon_max = obs_dataset.lons.max()
+
+    string_list = list(string.ascii_lowercase) 
+    ax = fig.add_subplot(row,column,1)
+    m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
+
+    x,y = m(lons, lats)
+
+    m.drawcoastlines(linewidth=1)
+    m.drawcountries(linewidth=1)
+    m.drawstates(linewidth=0.5, color='w')
+    cs = m.contourf(x, y, obs_clim, levels=plotter._nice_intervals(obs_dataset.values, 10), extend='both', cmap='PuOr')
+    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
+    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
+    plt.colorbar(cs, cax=cax)
+    clevs = plotter._nice_intervals(rcm_bias, 11)
+    for imodel in np.arange(len(model_datasets)):
+        ax = fig.add_subplot(row, column,2+imodel)
+        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+        m.drawcoastlines(linewidth=1)
+        m.drawcountries(linewidth=1)
+        m.drawstates(linewidth=0.5, color='w')
+        cs = m.contourf(x, y, rcm_bias[imodel,:], levels=clevs, extend='both', cmap='RdBu_r')
+        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
+
+    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
+    plt.colorbar(cs, cax=cax)
+
+    plt.subplots_adjust(hspace=0.01,wspace=0.05)
+
+    plt.show()
+    fig.savefig(file_name,dpi=600,bbox_inches='tight')
+
+def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name):
+
+    # calculate climatological mean fields
+    obs_dataset.values = utils.calc_temporal_mean(obs_dataset)
+    for dataset in model_datasets:
+        dataset.values = utils.calc_temporal_mean(dataset)
+
+    # Metrics (spatial standard deviation and pattern correlation)
+    # determine the metrics
+    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
+
+    # create the Evaluation object
+    taylor_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
+                                 model_datasets, # list of target datasets for the evaluation
+                                 [taylor_diagram])
+
+    # run the evaluation (Taylor diagram statistics)
+    taylor_evaluation.run() 
+
+    taylor_data = taylor_evaluation.results[0]
+
+    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right', frameon=False)
+
+def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
+                          file_name, row, column, x_tick=['']):
+
+    nmodel, nt, nregion = model_subregion_mean.shape  
+
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+        
+    x_axis = np.arange(nt)
+    x_tick_values = x_axis
+
+    fig = plt.figure()
+    rcParams['xtick.labelsize'] = 6
+    rcParams['ytick.labelsize'] = 6
+  
+    for iregion in np.arange(nregion):
+        ax = fig.add_subplot(row, column, iregion+1) 
+        if iregion+1 > column*(row-1):
+            x_tick_labels = x_tick
+        else:
+            x_tick_labels = ['']
+        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
+        for imodel in np.arange(nmodel):
+            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
+        ax.set_xlim([-0.5,nt-0.5])
+        ax.set_xticks(x_tick_values)
+        ax.set_xticklabels(x_tick_labels)
+        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
+    
+    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center', prop={'size':7}, frameon=False)
+
+    fig.subplots_adjust(hspace=0.7, wspace=0.5)
+    plt.show()
+    fig.savefig(file_name, dpi=600, bbox_inches='tight')
+
+def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
+                               file_name, normalize=True):
+
+    nmodel, nt, nregion = model_subregion_mean.shape
+    
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+
+    subregion_metrics = ma.zeros([4, nregion, nmodel])
+
+    for imodel in np.arange(nmodel):
+        for iregion in np.arange(nregion):
+            # First metric: bias
+            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time=True)
+            # Second metric: standard deviation
+            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Third metric: RMSE
+            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Fourth metric: correlation
+            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+
+    if normalize:
+        for iregion in np.arange(nregion):
+            subregion_metrics[0, iregion, :] = subregion_metrics[0, iregion, :]/ma.std(obs_data[0, :, iregion])*100.
+            subregion_metrics[1, iregion, :] = subregion_metrics[1, iregion, :]*100.
+            subregion_metrics[2, iregion, :] = subregion_metrics[2, iregion, :]/ma.std(obs_data[0, :, iregion])*100.
+
+    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
+
+    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
+        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric,
+                                      xlabel='model', ylabel='region')
+
+def Map_plot_subregion(subregions, ref_dataset, directory):
+  
+    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
+                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
+    m.drawcoastlines(linewidth=0.75)
+    m.drawcountries(linewidth=0.75)
+    m.etopo()  
+    x, y = m(lons, lats) 
+    #subregion_array = ma.masked_equal(subregion_array, 0)
+    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
+    for subregion in subregions:
+        draw_screen_poly(subregion[1], m, 'w') 
+        plt.annotate(subregion[0], xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center', va='center', fontsize=8)
+    plt.show()
+    fig.savefig(directory+'map_subregion', bbox_inches='tight')
+
+def draw_screen_poly(boundary_array, m, linecolor='k'):
+
+    ''' Draw a polygon on a map
+
+    :param boundary_array: [lat_north, lat_south, lon_east, lon_west]
+    :param m   : Basemap object
+    '''
+
+    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
+    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
+    x, y = m( lons, lats )
+    xy = zip(x,y)
+    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
+    plt.gca().add_patch(poly)
+    
+    
+   
+
+    
+
+    
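
A minimal usage sketch for the plotting helpers above, with hypothetical file paths; in practice run_RCMES.py (below) subsets and regrids all datasets onto a common grid before any of these functions are called:

    import ocw.data_source.local as local
    from example_package import Map_plot_bias_of_multiyear_climatology

    # Load one reference dataset and several target datasets (paths are hypothetical)
    obs = local.load_file('./data/CRU_tasmax.nc', 'tasmax', name='CRU')
    models, names = local.load_multiple_files('./data/AFRICA*tasmax.nc', 'tasmax')

    # Observed climatology in panel (a), one bias map per model, on a 3x4 grid
    Map_plot_bias_of_multiyear_climatology(obs, 'CRU', models, names,
                                           'cordex-AF_tasmax_bias_to_cru', 3, 4)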

http://git-wip-us.apache.org/repos/asf/climate/blob/4be65b18/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml b/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
new file mode 100644
index 0000000..c6b96cf
--- /dev/null
+++ b/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
@@ -0,0 +1,44 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
+
+# RCMES temporally subsets the data between month_start and month_end. If average_each_year is True, the seasonal mean in each year is calculated and used for the metrics calculation; if False, it is not.
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ./data/prec.*ncep.monavg.nc
+        variable: prec
+
+number_of_metrics_and_plots: 1
+
+metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots1:
+    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
+
+use_subregions: False
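
Here month_start: 6, month_end: 8 with average_each_year: True reduces each dataset to one JJA mean per year before the Taylor statistics are computed. A sketch of the equivalent direct call, using dsp.temporal_subset as invoked in run_RCMES.py below (dataset is a placeholder for any loaded OCW dataset):

    import ocw.dataset_processor as dsp

    # Keep only June-August and collapse each year's season to a single mean,
    # so 24 years of monthly data become 24 JJA-mean time slices
    jja_means = dsp.temporal_subset(6, 8, dataset, True)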

http://git-wip-us.apache.org/repos/asf/climate/blob/4be65b18/examples/run_RCMES.py
----------------------------------------------------------------------
diff --git a/examples/run_RCMES.py b/examples/run_RCMES.py
new file mode 100644
index 0000000..33c7cb9
--- /dev/null
+++ b/examples/run_RCMES.py
@@ -0,0 +1,213 @@
+# Apache OCW lib imports
+import ocw.dataset_processor as dsp
+import ocw.data_source.local as local
+import ocw.data_source.rcmed as rcmed
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.dataset import Bounds
+
+import matplotlib.pyplot as plt
+from matplotlib import rcParams
+import numpy as np
+import numpy.ma as ma
+import yaml
+from glob import glob
+import operator
+from dateutil import parser
+from datetime import datetime
+import os
+import sys
+
+from example_package import *
+
+import ssl
+if hasattr(ssl, '_create_unverified_context'):
+    ssl._create_default_https_context = ssl._create_unverified_context
+
+config_file = str(sys.argv[1])
+
+print 'Reading the configuration file ', config_file
+config = yaml.load(open(config_file))
+time_info = config['time']
+temporal_resolution = time_info['temporal_resolution']
+
+start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
+end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
+
+space_info = config['space']
+min_lat = space_info['min_lat']
+max_lat = space_info['max_lat']
+min_lon = space_info['min_lon']
+max_lon = space_info['max_lon']
+
+""" Step 1: Load the reference data """
+ref_data_info = config['datasets']['reference']
+print 'Loading observation dataset:\n',ref_data_info
+ref_name = ref_data_info['data_name']
+if ref_data_info['data_source'] == 'local':
+    ref_dataset = local.load_file(ref_data_info['path'],
+                                  ref_data_info['variable'], name=ref_name)
+elif ref_data_info['data_source'] == 'rcmed':
+    ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
+                                          ref_data_info['parameter_id'],
+                                          min_lat, max_lat, min_lon, max_lon,
+                                          start_time, end_time)
+else:
+    print ' '
+    # TO DO: support ESGF
+
+ref_dataset = dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
+
+""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
+model_data_info = config['datasets']['targets']
+print 'Loading model datasets:\n',model_data_info
+if model_data_info['data_source'] == 'local':
+    model_datasets, model_names = local.load_multiple_files(file_path=model_data_info['path'],
+                                                            variable_name=model_data_info['variable'])
+else:
+    print ' '
+    # TO DO: support RCMED and ESGF
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
+
+""" Step 3: Subset the data for temporal and spatial domain """
+# Create a Bounds object to use for subsetting
+if time_info['maximum_overlap_period']:
+    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
+    print 'Maximum overlap period'
+    print 'start_time:', start_time
+    print 'end_time:', end_time
+
+if temporal_resolution == 'monthly' and end_time.day !=1:
+    end_time = end_time.replace(day=1)
+if ref_data_info['data_source'] == 'rcmed':
+    min_lat = np.max([min_lat, ref_dataset.lats.min()])
+    max_lat = np.min([max_lat, ref_dataset.lats.max()])
+    min_lon = np.max([min_lon, ref_dataset.lons.min()])
+    max_lon = np.min([max_lon, ref_dataset.lons.max()])
+bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
+ref_dataset = dsp.subset(bounds,ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.subset(bounds,dataset)
+
+# Temporally subset both observation and model datasets for the user-specified season
+month_start = time_info['month_start']
+month_end = time_info['month_end']
+average_each_year = time_info['average_each_year']
+
+ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
+
+# generate grid points for regridding
+if config['regrid']['regrid_on_reference']:
+    new_lat = ref_dataset.lats
+    new_lon = ref_dataset.lons 
+else:
+    delta_lat = config['regrid']['regrid_dlat']
+    delta_lon = config['regrid']['regrid_dlon']
+    nlat = (max_lat - min_lat)/delta_lat+1
+    nlon = (max_lon - min_lon)/delta_lon+1
+    new_lat = np.linspace(min_lat, max_lat, nlat)
+    new_lon = np.linspace(min_lon, max_lon, nlon)
+
+# number of models
+nmodel = len(model_datasets)
+print 'Dataset loading completed'
+print 'Observation data:', ref_name 
+print 'Number of model datasets:',nmodel
+for model_name in model_names:
+    print model_name
+
+""" Step 4: Spatial regriding of the reference datasets """
+print 'Regridding datasets: ', config['regrid']
+if not config['regrid']['regrid_on_reference']:
+    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
+
+print 'Propagating missing data information'
+masked_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)
+ref_dataset, model_datasets = masked_datasets[0], masked_datasets[1:]
+
+""" Step 5: Checking and converting variable units """
+print 'Checking and converting variable units'
+ref_dataset = dsp.variable_unit_conversion(ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
+    
+
+print 'Generating multi-model ensemble'
+model_datasets.append(dsp.ensemble(model_datasets))
+model_names.append('ENS-models')
+
+""" Step 6: Generate subregion average and standard deviation """
+if config['use_subregions']:
+    # sort the subregion by region names and make a list
+    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
+
+    # number of subregions
+    nsubregion = len(subregions)
+
+    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
+
+    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions)
+    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions)
+
+""" Step 7: Write a netCDF file """
+workdir = config['workdir']
+if workdir[-1] != '/':
+    workdir = workdir+'/'
+print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
+if not os.path.exists(workdir):
+    os.makedirs(workdir)
+
+if config['use_subregions']:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'],
+                                                       subregions=subregions, subregion_array=subregion_array,
+                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
+                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
+else:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'])
+
+""" Step 8: Calculate metrics and draw plots """
+nmetrics = config['number_of_metrics_and_plots']
+if config['use_subregions']:
+    Map_plot_subregion(subregions, ref_dataset, workdir)
+
+if nmetrics > 0:
+    print 'Calculating metrics and generating plots'
+    for imetric in np.arange(nmetrics)+1:
+        metrics_name = config['metrics'+'%1d' %imetric]
+        plot_info = config['plots'+'%1d' %imetric]
+        file_name = workdir+plot_info['file_name']
+
+        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
+        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
+            row, column = plot_info['subplots_array']
+            Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                      file_name, row, column)
+        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
+            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                      file_name)
+        elif config['use_subregions']:
+            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
+            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
+            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name)
+            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name)
+        else:
+            print 'please check the currently supported metrics'
+
+
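
run_RCMES.py takes the configuration file as its only command-line argument, so either example above runs the full pipeline, e.g.:

    python run_RCMES.py cordex-AF_tasmax_bias_to_cru.yaml
    python run_RCMES.py narccap_prec_JJA_taylor_diagram_to_cru.yaml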

http://git-wip-us.apache.org/repos/asf/climate/blob/4be65b18/ocw/data_source/local.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/local.py b/ocw/data_source/local.py
index c0d4b07..60fcb50 100644
--- a/ocw/data_source/local.py
+++ b/ocw/data_source/local.py
@@ -271,19 +271,16 @@ def load_file(file_path,
                    units=variable_unit, name=name, origin=origin)
 
 def load_multiple_files(file_path,
-                        filename_pattern,
                         variable_name,
-                        dataset_name='ref',
+                        dataset_name='model',
                         variable_unit=None,
                         lat_name=None,
                         lon_name=None,
                         time_name=None):
     ''' load multiple netcdf files with common filename pattern and return an array of OCW datasets
 
-    :param file_path: directory name where the NetCDF files to load are stored.
+    :param file_path: directory name and common file name pattern for the NetCDF files to load.
     :type file_path: :mod:`string`
-    :param filename_pattern: common file name patterns
-    :type filename_pattern: :list:`string`
     :param dataset_name: a name of dataset when reading a single file 
     :type dataset_name: :mod:'string'
     :param variable_name: The variable name to load from the NetCDF file.
@@ -310,8 +307,7 @@ def load_multiple_files(file_path,
     '''
 
     data_filenames = []
-    for pattern in filename_pattern:
-        data_filenames.extend(glob(file_path + pattern))
+    data_filenames.extend(glob(file_path))
     data_filenames.sort()
 
     # number of files
@@ -333,4 +329,4 @@ def load_multiple_files(file_path,
         datasets.append(load_file(filename, variable_name, variable_unit, name=data_name[ifile],
                         lat_name=lat_name, lon_name=lon_name, time_name=time_name))
     
-    return datasets
+    return datasets, data_name
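
The load_multiple_files() change above folds the old filename_pattern list into a single glob-style file_path argument and returns the dataset names alongside the datasets. A before/after sketch with hypothetical paths:

    import ocw.data_source.local as local

    # Before this commit: directory and filename patterns were separate arguments
    # datasets = local.load_multiple_files('./data/', ['AFRICA*tasmax.nc'], 'tasmax')

    # After this commit: one glob pattern, and the names come back too
    datasets, names = local.load_multiple_files('./data/AFRICA*tasmax.nc', 'tasmax')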

