climate-commits mailing list archives

From huiky...@apache.org
Subject [1/3] climate git commit: CLIMATE-676 - Cleaning up the examples
Date Fri, 25 Sep 2015 16:33:19 GMT
Repository: climate
Updated Branches:
  refs/heads/master e78ac8eee -> c9813e36b


http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/simple_model_to_model_bias.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/simple_model_to_model_bias.py b/examples/old_examples/simple_model_to_model_bias.py
deleted file mode 100644
index 635e872..0000000
--- a/examples/old_examples/simple_model_to_model_bias.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-from os import path
-import urllib
-
-import numpy as np
-
-import ocw.data_source.local as local
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Two Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "wrf_bias_compared_to_knmi"
-
-FILE_1_PATH = path.join('/tmp', FILE_1)
-FILE_2_PATH = path.join('/tmp', FILE_2)
-
-if not path.exists(FILE_1_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
-if not path.exists(FILE_2_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)
-
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-print("Loading %s into an OCW Dataset Object" % (FILE_1_PATH,))
-knmi_dataset = local.load_file(FILE_1_PATH, "tasmax")
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
-
-print("Loading %s into an OCW Dataset Object" % (FILE_2_PATH,))
-wrf_dataset = local.load_file(FILE_2_PATH, "tasmax")
-print("WRF_Dataset.values shape: (times, lats, lons) - %s \n" % (wrf_dataset.values.shape,))
-
-""" Step 2: Temporally Rebin the Data into an Annual Timestep """
-print("Temporally Rebinning the Datasets to an Annual Timestep")
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=365))
-wrf_dataset = dsp.temporal_rebin(wrf_dataset, datetime.timedelta(days=365))
-print("KNMI_Dataset.values shape: %s" % (knmi_dataset.values.shape,))
-print("WRF_Dataset.values shape: %s \n\n" % (wrf_dataset.values.shape,))
-
-""" Step 3: Spatially Regrid the Dataset Objects to a 1 degree grid """
-#  The spatial_boundaries() function returns the spatial extent of the dataset
-print("The KNMI_Dataset spatial bounds (min_lat, max_lat, min_lon, max_lon) are: \n"
-      "%s\n" % (knmi_dataset.spatial_boundaries(), ))
-print("The KNMI_Dataset spatial resolution (lat_resolution, lon_resolution) is: \n"
-      "%s\n\n" % (knmi_dataset.spatial_resolution(), ))
-
-min_lat, max_lat, min_lon, max_lon = knmi_dataset.spatial_boundaries()
-
-# Using the bounds, we will create a new set of lats and lons on a 1 degree step
-new_lons = np.arange(min_lon, max_lon, 1)
-new_lats = np.arange(min_lat, max_lat, 1)
-
-# Spatially regrid datasets using the new_lats, new_lons numpy arrays
-print("Spatially Regridding the KNMI_Dataset...")
-knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
-print("Final shape of the KNMI_Dataset: \n"
-      "%s\n" % (knmi_dataset.values.shape, ))
-print("Spatially Regridding the WRF_Dataset...")
-wrf_dataset = dsp.spatial_regrid(wrf_dataset, new_lats, new_lons)
-print("Final shape of the WRF_Dataset: \n"
-      "%s\n" % (wrf_dataset.values.shape, ))
-
-""" Step 4:  Build a Metric to use for Evaluation - Bias for this example """
-# You can build your own metrics, but OCW also ships with some common metrics
-print("Setting up a Bias metric to use for evaluation")
-bias = metrics.Bias()
-
-""" Step 5: Create an Evaluation Object using Datasets and our Metric """
-# The Evaluation Class Signature is:
-# Evaluation(reference, targets, metrics, subregions=None)
-# Evaluation can take in multiple targets and metrics, so we need to convert
-# our examples into Python lists.  Evaluation will iterate over the lists
-print("Making the Evaluation definition")
-bias_evaluation = evaluation.Evaluation(knmi_dataset, [wrf_dataset], [bias])
-print("Executing the Evaluation using the object's run() method")
-bias_evaluation.run()
-
-""" Step 6: Make a Plot from the Evaluation.results """
-# The Evaluation.results are a set of nested lists to support many different
-# possible Evaluation scenarios.
-#
-# The Evaluation results docs say:
-# The shape of results is (num_metrics, num_target_datasets) if no subregion
-# Accessing the actual results when we have used 1 metric and 1 dataset is
-# done this way:
-print("Accessing the Results of the Evaluation run")
-results = bias_evaluation.results[0][0]
-print("The results are of type: %s" % type(results))
-
-# From the bias output I want to make a Contour Map of the region
-print("Generating a contour map using ocw.plotter.draw_contour_map()")
-
-lats = new_lats
-lons = new_lons
-fname = OUTPUT_PLOT
-gridshape = (4, 5) # 20 years of plots arranged on a 4 x 5 grid
-plot_title = "TASMAX Bias of WRF Compared to KNMI (1989 - 2008)"
-sub_titles = range(1989, 2009, 1)
-
-plotter.draw_contour_map(results, lats, lons, fname, 
-                         gridshape=gridshape, ptitle=plot_title, 
-                         subtitles=sub_titles)
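
For anyone who still needs the removed walkthrough, here is a condensed sketch
of the same model-to-model bias workflow updated for Python 3. urllib.request
replaces the Python 2 urllib module; the ocw calls are assumed to keep the
signatures shown in the deleted file.

    import datetime
    import urllib.request
    from os import path

    import numpy as np

    import ocw.data_source.local as local
    import ocw.dataset_processor as dsp
    import ocw.evaluation as evaluation
    import ocw.metrics as metrics
    import ocw.plotter as plotter

    FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
    FILES = ["AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc",
             "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"]

    for f in FILES:
        if not path.exists(path.join('/tmp', f)):
            # Python 3 spelling of Python 2's urllib.urlretrieve
            urllib.request.urlretrieve(FILE_LEADER + f, path.join('/tmp', f))

    knmi_dataset = local.load_file(path.join('/tmp', FILES[0]), "tasmax")
    wrf_dataset = local.load_file(path.join('/tmp', FILES[1]), "tasmax")

    # Rebin both datasets to an annual timestep, then regrid onto a shared
    # 1 degree grid derived from the KNMI dataset's spatial bounds
    knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=365))
    wrf_dataset = dsp.temporal_rebin(wrf_dataset, datetime.timedelta(days=365))
    min_lat, max_lat, min_lon, max_lon = knmi_dataset.spatial_boundaries()
    new_lats = np.arange(min_lat, max_lat, 1)
    new_lons = np.arange(min_lon, max_lon, 1)
    knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
    wrf_dataset = dsp.spatial_regrid(wrf_dataset, new_lats, new_lons)

    # Evaluation(reference, targets, metrics); with no subregions the results
    # are indexed by metric and then by target dataset
    bias_evaluation = evaluation.Evaluation(knmi_dataset, [wrf_dataset],
                                            [metrics.Bias()])
    bias_evaluation.run()
    results = bias_evaluation.results[0][0]
    plotter.draw_contour_map(results, new_lats, new_lons,
                             "wrf_bias_compared_to_knmi", gridshape=(4, 5),
                             ptitle="TASMAX Bias of WRF Compared to KNMI (1989 - 2008)",
                             subtitles=list(range(1989, 2009)))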

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/simple_model_to_model_bias_DJF_and_JJA.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/simple_model_to_model_bias_DJF_and_JJA.py b/examples/old_examples/simple_model_to_model_bias_DJF_and_JJA.py
deleted file mode 100644
index 364498a..0000000
--- a/examples/old_examples/simple_model_to_model_bias_DJF_and_JJA.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-from os import path
-import urllib
-
-import numpy as np
-
-import ocw.data_source.local as local
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Two Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "wrf_bias_compared_to_knmi"
-
-FILE_1_PATH = path.join('/tmp', FILE_1)
-FILE_2_PATH = path.join('/tmp', FILE_2)
-
-if not path.exists(FILE_1_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
-if not path.exists(FILE_2_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)
-
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-print("Loading %s into an OCW Dataset Object" % (FILE_1_PATH,))
-knmi_dataset = local.load_file(FILE_1_PATH, "tasmax")
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
-
-print("Loading %s into an OCW Dataset Object" % (FILE_2_PATH,))
-wrf_dataset = local.load_file(FILE_2_PATH, "tasmax")
-print("WRF_Dataset.values shape: (times, lats, lons) - %s \n" % (wrf_dataset.values.shape,))
-
-""" Step 2: Calculate seasonal average """
-print("Calculate seasonal average")
-knmi_DJF_mean = utils.calc_temporal_mean(dsp.temporal_subset(month_start=12, month_end=2, target_dataset=knmi_dataset))
-wrf_DJF_mean = utils.calc_temporal_mean(dsp.temporal_subset(month_start=12, month_end=2, target_dataset=wrf_dataset))
-print("Seasonally averaged KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_DJF_mean.shape,))
-print("Seasonally averaged wrf_Dataset.values shape: (times, lats, lons) - %s \n" % (wrf_DJF_mean.shape,))
-knmi_JJA_mean = utils.calc_temporal_mean(dsp.temporal_subset(month_start=6, month_end=8, target_dataset=knmi_dataset))
-wrf_JJA_mean = utils.calc_temporal_mean(dsp.temporal_subset(month_start=6, month_end=8, target_dataset=wrf_dataset))
-
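
The seasonal-averaging pattern above generalizes to any month range, including
ranges that wrap the end of the year (month_start=12, month_end=2). A minimal
sketch, assuming the dsp.temporal_subset and utils.calc_temporal_mean
signatures used in the deleted file and the /tmp download from the script
above:

    import ocw.data_source.local as local
    import ocw.dataset_processor as dsp
    import ocw.utils as utils

    knmi_dataset = local.load_file(
        '/tmp/AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc',
        'tasmax')

    # December-February (DJF) mean; temporal_subset keeps only those months
    knmi_djf = dsp.temporal_subset(month_start=12, month_end=2,
                                   target_dataset=knmi_dataset)
    knmi_djf_mean = utils.calc_temporal_mean(knmi_djf)

    # June-August (JJA) mean, computed the same way
    knmi_jja = dsp.temporal_subset(month_start=6, month_end=8,
                                   target_dataset=knmi_dataset)
    knmi_jja_mean = utils.calc_temporal_mean(knmi_jja)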

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/simple_model_tstd.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/simple_model_tstd.py b/examples/old_examples/simple_model_tstd.py
deleted file mode 100644
index 4c87813..0000000
--- a/examples/old_examples/simple_model_tstd.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from os import path
-import urllib
-
-import ocw.data_source.local as local
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# One Local Model File
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "knmi_temporal_std"
-
-# Download necessary NetCDF file if needed
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects """
-print "Loading %s into an OCW Dataset Object" % (FILE_1,)
-# 'tasmax' is the variable name of the values to load
-knmi_dataset = local.load_file(FILE_1, "tasmax")
-
-print "KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,)
-
-# Accessing the latitudes and longitudes of the netCDF file
-lats = knmi_dataset.lats
-lons = knmi_dataset.lons
-
-""" Step 2:  Build a Metric to use for Evaluation - Temporal STD for this example """
-# You can build your own metrics, but OCW also ships with some common metrics
-print "Setting up a Temporal STD metric to use for evaluation"
-std = metrics.TemporalStdDev()
-
-""" Step 3: Create an Evaluation Object using Datasets and our Metric """
-# The Evaluation Class Signature is:
-# Evaluation(reference, targets, metrics, subregions=None)
-# Evaluation can take in multiple targets and metrics, so we need to convert
-# our examples into Python lists.  Evaluation will iterate over the lists
-print "Making the Evaluation definition"
-# The Temporal STD metric takes a single target dataset, so the reference dataset should be None
-std_evaluation = evaluation.Evaluation(None, [knmi_dataset], [std])
-print "Executing the Evaluation using the object's run() method"
-std_evaluation.run()
-
-""" Step 4: Make a Plot from the Evaluation.results """
-# The Evaluation.results are a set of nested lists to support many different
-# possible Evaluation scenarios.
-#
-# The Evaluation results docs say:
-# The shape of results is (num_metrics, num_target_datasets) if no subregion
-# Accessing the actual results when we have used 1 metric and 1 dataset is
-# done this way:
-print "Accessing the Results of the Evaluation run"
-results = std_evaluation.unary_results[0][0]
-print "The results are of type: %s" % type(results)
-
-# From the temporal std output I want to make a Contour Map of the region
-print "Generating a contour map using ocw.plotter.draw_contour_map()"
-
-fname = OUTPUT_PLOT
-gridshape = (4, 5) # 20 years of plots arranged on a 4 x 5 grid
-plot_title = "TASMAX Temporal Standard Deviation (1989 - 2008)"
-sub_titles = range(1989, 2009, 1)
-
-plotter.draw_contour_map(results, lats, lons, fname,
-                         gridshape=gridshape, ptitle=plot_title,
-                         subtitles=sub_titles)
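
The point this example makes, easy to miss in the walkthrough: TemporalStdDev
is a unary metric, so the Evaluation takes None as its reference dataset and
the output lands in unary_results rather than results. A minimal sketch,
assuming the file has already been downloaded as above:

    import ocw.data_source.local as local
    import ocw.evaluation as evaluation
    import ocw.metrics as metrics

    knmi_dataset = local.load_file(
        "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc",
        "tasmax")

    # Unary metrics are computed per target dataset; no reference is needed
    std_evaluation = evaluation.Evaluation(None, [knmi_dataset],
                                           [metrics.TemporalStdDev()])
    std_evaluation.run()

    # unary_results is indexed by metric, then by target dataset
    results = std_evaluation.unary_results[0][0]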

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/subregions.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/subregions.py b/examples/old_examples/subregions.py
deleted file mode 100644
index 20aaee9..0000000
--- a/examples/old_examples/subregions.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Apache OCW lib imports
-from ocw.dataset import Dataset, Bounds
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-
-import datetime
-import numpy as np
-import numpy.ma as ma
-
-OUTPUT_PLOT = "subregions"
-
-# Spatial and temporal configurations
-LAT_MIN = -45.0 
-LAT_MAX = 42.24
-LON_MIN = -24.0
-LON_MAX = 60.0 
-START_SUB = datetime.datetime(2000, 01, 1)
-END_SUB = datetime.datetime(2007, 12, 31)
-
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
-
-#Regrid
-print("... regrid")
-new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
-new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
-
-list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB), 
- Bounds(0.0, 10.0,  29.0, 37.5, START_SUB, END_SUB),
- Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(-19.3,-10.2,12.0, 20.0, START_SUB, END_SUB),
- Bounds( 15.0, 30.0, 15.0, 25.0,START_SUB, END_SUB),
- Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
- Bounds(-10.9, 10.0, 5.0, 7.3,  START_SUB, END_SUB),
- Bounds(33.9, 40.0,  6.9, 15.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,  0.0, 10.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,-10.0,  0.0, START_SUB, END_SUB),
- Bounds(30.0, 40.0,-15.0,  0.0, START_SUB, END_SUB),
- Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
-
-#for plotting the subregions
-plotter.draw_subregions(list_of_regions, new_lats, new_lons, OUTPUT_PLOT, fmt='png')
-
-                               
-
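
A detail worth keeping from this file is the Bounds argument order that all of
these examples rely on: Bounds(lat_min, lat_max, lon_min, lon_max, start, end),
with the two datetimes optional. A single-region sketch using the first entry
of list_of_regions above:

    import datetime

    import numpy as np

    from ocw.dataset import Bounds
    import ocw.plotter as plotter

    start = datetime.datetime(2000, 1, 1)
    end = datetime.datetime(2007, 12, 31)

    # Bounds(lat_min, lat_max, lon_min, lon_max, start, end)
    region = Bounds(-10.0, 0.0, 29.0, 36.5, start, end)

    new_lats = np.arange(-45.0, 42.24, 0.5)
    new_lons = np.arange(-24.0, 60.0, 0.5)
    plotter.draw_subregions([region], new_lats, new_lons,
                            "single_subregion", fmt='png')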

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/subregions_portrait_diagram.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/subregions_portrait_diagram.py b/examples/old_examples/subregions_portrait_diagram.py
deleted file mode 100644
index 075de2d..0000000
--- a/examples/old_examples/subregions_portrait_diagram.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# Apache OCW lib imports
-from ocw.dataset import Dataset, Bounds
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-
-import datetime
-import numpy as np
-import numpy.ma as ma
-
-from os import path
-import urllib
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "portrait_diagram"
-
-# Spatial and temporal configurations
-LAT_MIN = -45.0 
-LAT_MAX = 42.24
-LON_MIN = -24.0
-LON_MAX = 60.0 
-START = datetime.datetime(2000, 01, 1)
-END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-#variable that we are analyzing
-varName = 'pr' 
-
-#regridding parameters
-gridLonStep = 0.5
-gridLatStep = 0.5
-
-#some vars for this evaluation
-target_datasets_ensemble = []
-target_datasets = []
-allNames = []
-
-# Download the necessary NetCDF files if not present
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-""" Step 3: Processing Datasets so they are the same shape """
-print("Processing datasets ...")
-CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
-print("... on units")
-CRU31 = dsp.water_flux_unit_conversion(CRU31)
-
-for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])	
-	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')
-
-print("... spatial regridding")
-new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
-new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
-CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
-
-for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-	
-# find the total annual mean. Note the function exists in utils.py as calc_climatology_year(dataset)
-_,CRU31.values = utils.calc_climatology_year(CRU31)
-
-for member, each_target_dataset in enumerate(target_datasets):
-	_, target_datasets[member].values = utils.calc_climatology_year(target_datasets[member])
-
-#make the model ensemble
-target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
-
-#append to the target_datasets for final analysis
-target_datasets.append(target_datasets_ensemble)
-
-for target in target_datasets:
-	allNames.append(target.name)
-
-list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5), 
- Bounds(0.0, 10.0,  29.0, 37.5), 
- Bounds(10.0, 20.0, 25.0, 32.5), 
- Bounds(20.0, 33.0, 25.0, 32.5), 
- Bounds(-19.3,-10.2,12.0, 20.0), 
- Bounds( 15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0), 
- Bounds(-10.9, 10.0, 5.0, 7.3),  
- Bounds(33.9, 40.0,  6.9, 15.0), 
- Bounds(10.0, 25.0,  0.0, 10.0), 
- Bounds(10.0, 25.0,-10.0,  0.0), 
- Bounds(30.0, 40.0,-15.0,  0.0), 
- Bounds(33.0, 40.0, 25.0, 35.00)]
-
-region_list=["R"+str(i+1) for i in xrange(13)]
-
-#metrics
-pattern_correlation = metrics.PatternCorrelation()
-
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # 1 or more target datasets for the evaluation
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [pattern_correlation], 
-                                    # list of subregion Bounds Objects
-                                    list_of_regions)   
-RCMs_to_CRU_evaluation.run()
-
-new_patcor = np.squeeze(np.array(RCMs_to_CRU_evaluation.results), axis=1)
-
-plotter.draw_portrait_diagram(new_patcor, allNames, region_list, fname=OUTPUT_PLOT, fmt='png', cmap='coolwarm_r')
-
-                              
-
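
The np.squeeze at the end is the non-obvious step: with a single metric,
converting Evaluation.results to an array and dropping its length-1 axis
leaves a (models, regions) matrix, which is the row/column layout that
draw_portrait_diagram plots. A sketch of just that reshaping, with random
values standing in for real pattern-correlation results (the (4, 1, 13) shape
is an assumption matching four targets, one metric and thirteen subregions):

    import numpy as np

    import ocw.plotter as plotter

    # Hypothetical stand-in for RCMs_to_CRU_evaluation.results after a run
    results = np.random.rand(4, 1, 13)

    new_patcor = np.squeeze(np.array(results), axis=1)  # shape (4, 13)

    model_names = ["KNMI", "REGCM", "UCT", "ENS"]
    region_list = ["R" + str(i + 1) for i in range(13)]  # range replaces xrange
    plotter.draw_portrait_diagram(new_patcor, model_names, region_list,
                                  fname="portrait_diagram", fmt='png',
                                  cmap='coolwarm_r')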

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/taylor_diagram_example.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/taylor_diagram_example.py b/examples/old_examples/taylor_diagram_example.py
deleted file mode 100644
index b08502e..0000000
--- a/examples/old_examples/taylor_diagram_example.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import sys
-from os import path
-import urllib
-
-import numpy
-
-from ocw.dataset import Bounds
-import ocw.data_source.local as local
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
-
-# Download some example NetCDF files for the evaluation
-################################################################################
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-# Load the example datasets into OCW Dataset objects. We want to load
-# the 'tasmax' variable values. We'll also name the datasets for use
-# when plotting.
-################################################################################
-knmi_dataset = local.load_file(FILE_1, "tasmax")
-wrf_dataset = local.load_file(FILE_2, "tasmax")
-
-knmi_dataset.name = "knmi"
-wrf_dataset.name = "wrf"
-
-# Date values from loaded datasets might not always fall on reasonable days.
-# With monthly data, we could have data falling on the 1st, 15th, or some other
-# day of the month. Let's fix that real quick.
-################################################################################
-knmi_dataset = dsp.normalize_dataset_datetimes(knmi_dataset, 'monthly')
-wrf_dataset = dsp.normalize_dataset_datetimes(wrf_dataset, 'monthly')
-
-# We're only going to run this evaluation over a year's worth of data. We'll
-# make a Bounds object and use it to subset our datasets.
-################################################################################
-subset = Bounds(-45, 42, -24, 60, datetime.datetime(1989, 1, 1), datetime.datetime(1989, 12, 1))
-knmi_dataset = dsp.subset(subset, knmi_dataset)
-wrf_dataset = dsp.subset(subset, wrf_dataset)
-
-# Temporally re-bin the data into a monthly timestep.
-################################################################################
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=30))
-wrf_dataset = dsp.temporal_rebin(wrf_dataset, datetime.timedelta(days=30))
-
-# Spatially regrid the datasets onto a 1 degree grid.
-################################################################################
-# Get the bounds of the reference dataset and use them to create a new
-# set of lat/lon values on a 1 degree step
-min_lat, max_lat, min_lon, max_lon = knmi_dataset.spatial_boundaries()
-new_lons = numpy.arange(min_lon, max_lon, 1)
-new_lats = numpy.arange(min_lat, max_lat, 1)
-
-# Spatially regrid datasets using the new_lats, new_lons numpy arrays
-knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
-wrf_dataset = dsp.spatial_regrid(wrf_dataset, new_lats, new_lons)
-
-# Load the metrics that we want to use for the evaluation.
-################################################################################
-sstdr = metrics.StdDevRatio()
-pc = metrics.PatternCorrelation()
-
-# Create our new evaluation object. The knmi dataset is the evaluation's
-# reference dataset. We then provide a list of 1 or more target datasets
-# to use for the evaluation. In this case, we only want to use the wrf dataset.
-# Then we pass a list of all the metrics that we want to use in the evaluation.
-################################################################################
-test_evaluation = evaluation.Evaluation(knmi_dataset, [wrf_dataset], [sstdr, pc])
-test_evaluation.run()
-
-# Pull out the evaluation results and prepare them for drawing a Taylor diagram.
-################################################################################
-spatial_stddev_ratio = test_evaluation.results[0][0]
-spatial_correlation = test_evaluation.results[0][1]
-
-taylor_data = numpy.array([[spatial_stddev_ratio], [spatial_correlation]]).transpose()
-
-# Draw our taylor diagram!
-################################################################################
-plotter.draw_taylor_diagram(taylor_data,
-                            [wrf_dataset.name],
-                            knmi_dataset.name,
-                            fname='taylor_plot',
-                            fmt='png',
-                            frameon=False)
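
The only Taylor-specific data wrangling is pairing each target dataset's
standard deviation ratio with its pattern correlation, one row per target.
A sketch of that assembly, with hypothetical metric values standing in for
the results pulled out of test_evaluation above:

    import numpy

    # Hypothetical values standing in for the StdDevRatio and
    # PatternCorrelation results of the evaluation run
    spatial_stddev_ratio = 1.12
    spatial_correlation = 0.94

    # Column 0: stddev ratios; column 1: correlations; one row per target
    taylor_data = numpy.array([[spatial_stddev_ratio],
                               [spatial_correlation]]).transpose()
    print(taylor_data.shape)  # (1, 2)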

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/time_series_with_regions.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/time_series_with_regions.py b/examples/old_examples/time_series_with_regions.py
deleted file mode 100644
index 1d552a8..0000000
--- a/examples/old_examples/time_series_with_regions.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Apache OCW lib imports
-from ocw.dataset import Dataset, Bounds
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-
-import datetime
-import numpy as np
-import numpy.ma as ma
-from os import path
-import urllib
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-
-LAT_MIN = -45.0 
-LAT_MAX = 42.24 
-LON_MIN = -24.0
-LON_MAX = 60.0 
-START = datetime.datetime(2000, 01, 1)
-END = datetime.datetime(2007, 12, 31)
-
-EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-varName = 'pr' 
-gridLonStep=0.44
-gridLatStep=0.44
-
-#needed vars for the script
-target_datasets =[]
-tSeries =[]
-results =[]
-labels = [] # could just as easily be the names for each subregion
-region_counter = 0
-
-# Download the necessary NetCDF files if not present
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
-
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-
-""" Step 3: Processing datasets so they are the same shape ... """
-print("Processing datasets so they are the same shape")
-CRU31 = dsp.water_flux_unit_conversion(CRU31)
-CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
-
-for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
-	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')
-	
-print("... spatial regridding")
-new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
-new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
-CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
-
-
-for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-
-#find climatology monthly for obs and models
-CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31)
-
-for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member].values, target_datasets[member].times = utils.calc_climatology_monthly(target_datasets[member])
-		
-#make the model ensemble
-target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
-
-#append to the target_datasets for final analysis
-target_datasets.append(target_datasets_ensemble)
-
-""" Step 4: Subregion stuff """
-list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5), 
- Bounds(0.0, 10.0,  29.0, 37.5), 
- Bounds(10.0, 20.0, 25.0, 32.5),
- Bounds(20.0, 33.0, 25.0, 32.5), 
- Bounds(-19.3,-10.2,12.0, 20.0), 
- Bounds( 15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0), 
- Bounds(-10.9, 10.0, 5.0, 7.3),  
- Bounds(33.9, 40.0,  6.9, 15.0),
- Bounds(10.0, 25.0,  0.0, 10.0), 
- Bounds(10.0, 25.0,-10.0,  0.0), 
- Bounds(30.0, 40.0,-15.0,  0.0), 
- Bounds(33.0, 40.0, 25.0, 35.0)]
-
-region_list=[["R"+str(i+1)] for i in xrange(13)]
-
-for regions in region_list:
-	firstTime = True
-	subset_name = regions[0]+"_CRU31"
-	#labels.append(subset_name) #for legend, uncomment this line
-	subset = dsp.subset(list_of_regions[region_counter], CRU31, subset_name)
-	tSeries = utils.calc_time_series(subset)
-	results.append(tSeries)
-	tSeries=[]
-	firstTime = False
-	for member, each_target_dataset in enumerate(target_datasets):
-		subset_name = regions[0]+"_"+target_datasets[member].name
-		#labels.append(subset_name) #for legend, uncomment this line
-		subset = dsp.subset(list_of_regions[region_counter],target_datasets[member],subset_name)
-		tSeries = utils.calc_time_series(subset)
-		results.append(tSeries)
-		tSeries=[]
-	
-	plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0], ptitle=regions[0],fmt='png')
-	results =[]
-	tSeries =[]
-	labels =[]
-	region_counter+=1
-			
-                               
-
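
The region_counter bookkeeping in the loop above can be dropped by pairing
region names and Bounds directly with zip. A sketch of the same per-region
time-series loop, written as a function so the datasets built earlier in the
script can be passed in; unlike the original, the labels list is filled in so
the plots get a legend:

    import numpy as np

    import ocw.dataset_processor as dsp
    import ocw.plotter as plotter
    import ocw.utils as utils

    def plot_subregion_time_series(list_of_regions, obs_dataset, target_datasets):
        region_names = ["R" + str(i + 1) for i in range(len(list_of_regions))]
        for name, region in zip(region_names, list_of_regions):
            results, labels = [], []
            for dataset in [obs_dataset] + target_datasets:
                # fall back to a fixed label if the dataset carries no name
                subset_name = name + "_" + (dataset.name or "CRU31")
                subset = dsp.subset(region, dataset, subset_name)
                results.append(utils.calc_time_series(subset))
                labels.append(subset_name)
            plotter.draw_time_series(np.array(results), obs_dataset.times,
                                     labels, name, ptitle=name, fmt='png')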

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/run_RCMES.py
----------------------------------------------------------------------
diff --git a/examples/run_RCMES.py b/examples/run_RCMES.py
deleted file mode 100644
index 9039486..0000000
--- a/examples/run_RCMES.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# Apache OCW lib imports
-import ocw.dataset_processor as dsp
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.dataset import Bounds
-
-import matplotlib.pyplot as plt
-from matplotlib import rcParams
-import numpy as np
-import numpy.ma as ma
-import yaml
-from glob import glob
-import operator
-from dateutil import parser
-from datetime import datetime
-import os
-import sys
-
-from example_package import *
-
-import ssl
-if hasattr(ssl, '_create_unverified_context'):
-    ssl._create_default_https_context = ssl._create_unverified_context
-
-config_file = str(sys.argv[1])
-
-print 'Reading the configuration file ', config_file
-config = yaml.load(open(config_file))
-time_info = config['time']
-temporal_resolution = time_info['temporal_resolution']
-
-start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
-end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
-
-space_info = config['space']
-min_lat = space_info['min_lat']
-max_lat = space_info['max_lat']
-min_lon = space_info['min_lon']
-max_lon = space_info['max_lon']
-
-""" Step 1: Load the reference data """
-ref_data_info = config['datasets']['reference']
-print 'Loading observation dataset:\n',ref_data_info
-ref_name = ref_data_info['data_name']
-if ref_data_info['data_source'] == 'local':
-    ref_dataset = local.load_file(ref_data_info['path'],
-                                  ref_data_info['variable'], name=ref_name)
-elif ref_data_info['data_source'] == 'rcmed':
-      ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
-                                            ref_data_info['parameter_id'],
-                                            min_lat, max_lat, min_lon, max_lon,
-                                            start_time, end_time)
-else:
-    print ' '
-    # TO DO: support ESGF
-
-ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
-
-""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
-model_data_info = config['datasets']['targets']
-print 'Loading model datasets:\n',model_data_info
-if model_data_info['data_source'] == 'local':
-    model_datasets, model_names = local.load_multiple_files(file_path=model_data_info['path'],
-                                                            variable_name=model_data_info['variable'])
-else:
-    print ' '
-    # TO DO: support RCMED and ESGF
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
-
-""" Step 3: Subset the data for temporal and spatial domain """
-# Create a Bounds object to use for subsetting
-if time_info['maximum_overlap_period']:
-    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
-    print 'Maximum overlap period'
-    print 'start_time:', start_time
-    print 'end_time:', end_time
-
-if temporal_resolution == 'monthly' and end_time.day !=1:
-    end_time = end_time.replace(day=1)
-if ref_data_info['data_source'] == 'rcmed':
-    min_lat = np.max([min_lat, ref_dataset.lats.min()])
-    max_lat = np.min([max_lat, ref_dataset.lats.max()])
-    min_lon = np.max([min_lon, ref_dataset.lons.min()])
-    max_lon = np.min([max_lon, ref_dataset.lons.max()])
-bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-
-if ref_dataset.lats.ndim !=2 and ref_dataset.lons.ndim !=2:
-    ref_dataset = dsp.subset(bounds,ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    if dataset.lats.ndim !=2 and dataset.lons.ndim !=2:
-        model_datasets[idata] = dsp.subset(bounds,dataset)
-
-# Temporally subset both observation and model datasets for the user-specified season
-month_start = time_info['month_start']
-month_end = time_info['month_end']
-average_each_year = time_info['average_each_year']
-
-ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
-
-# generate grid points for regridding
-if config['regrid']['regrid_on_reference']:
-    new_lat = ref_dataset.lats
-    new_lon = ref_dataset.lons 
-else:
-    delta_lat = config['regrid']['regrid_dlat']
-    delta_lon = config['regrid']['regrid_dlon']
-    nlat = (max_lat - min_lat)/delta_lat+1
-    nlon = (max_lon - min_lon)/delta_lon+1
-    new_lat = np.linspace(min_lat, max_lat, nlat)
-    new_lon = np.linspace(min_lon, max_lon, nlon)
-
-# number of models
-nmodel = len(model_datasets)
-print 'Dataset loading completed'
-print 'Observation data:', ref_name 
-print 'Number of model datasets:',nmodel
-for model_name in model_names:
-    print model_name
-
-""" Step 4: Spatial regriding of the reference datasets """
-print 'Regridding datasets: ', config['regrid']
-if not config['regrid']['regrid_on_reference']:
-    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
-
-print 'Propagating missing data information'
-ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0]
-model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:]
-
-""" Step 5: Checking and converting variable units """
-print 'Checking and converting variable units'
-ref_dataset = dsp.variable_unit_conversion(ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
-    
-
-print 'Generating multi-model ensemble'
-model_datasets.append(dsp.ensemble(model_datasets))
-model_names.append('ENS-models')
-
-""" Step 6: Generate subregion average and standard deviation """
-if config['use_subregions']:
-    # sort the subregion by region names and make a list
-    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
-
-    # number of subregions
-    nsubregion = len(subregions)
-
-    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
-
-    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions)
-    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions)
-
-""" Step 7: Write a netCDF file """
-workdir = config['workdir']
-if workdir[-1] != '/':
-    workdir = workdir+'/'
-print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
-if not os.path.exists(workdir):
-    os.system("mkdir "+workdir)
-
-if config['use_subregions']:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'],
-                                                       subregions=subregions, subregion_array=subregion_array,
-                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
-                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
-else:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'])
-
-""" Step 8: Calculate metrics and draw plots """
-nmetrics = config['number_of_metrics_and_plots']
-if config['use_subregions']:
-    Map_plot_subregion(subregions, ref_dataset, workdir)
-
-if nmetrics > 0:
-    print 'Calculating metrics and generating plots'
-    for imetric in np.arange(nmetrics)+1:
-        metrics_name = config['metrics'+'%1d' %imetric]
-        plot_info = config['plots'+'%1d' %imetric]
-        file_name = workdir+plot_info['file_name']
-
-        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
-        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
-            row, column = plot_info['subplots_array']
-            Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                      file_name, row, column)
-        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
-            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                      file_name)
-        elif config['use_subregions']:
-            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
-            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
-            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name)
-            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name)
-        else:
-            print 'Please check the list of currently supported metrics'
-
-
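
One caveat for anyone reusing this config-driven pattern: yaml.load without an
explicit Loader will construct arbitrary Python objects from tagged YAML, so
yaml.safe_load is the safer spelling for a plain configuration file like this
one. A minimal sketch of the loading step; safe_load still parses YAML dates,
so the strftime round-trip above keeps working:

    import sys
    from datetime import datetime

    import yaml

    config_file = str(sys.argv[1])
    with open(config_file) as f:
        # safe_load restricts parsing to plain YAML types
        config = yaml.safe_load(f)

    time_info = config['time']
    start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),
                                   '%Y%m%d')
    end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),
                                 '%Y%m%d')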

