climate-commits mailing list archives

From: omk...@apache.org
Subject: [3/4] climate git commit: CLIMATE-852 [OCW Documentation] Theme not found error
Date: Tue, 27 Sep 2016 21:05:39 GMT
CLIMATE-852 [OCW Documentation] Theme not found error


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/731419f8
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/731419f8
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/731419f8

Branch: refs/heads/master
Commit: 731419f8a2b31b8f3466d78efe7710957c44f813
Parents: f931bb1
Author: Lewis John McGibbney <lewis.mcgibbney@gmail.com>
Authored: Mon Sep 26 19:59:22 2016 -0700
Committer: Lewis John McGibbney <lewis.mcgibbney@gmail.com>
Committed: Mon Sep 26 19:59:22 2016 -0700

----------------------------------------------------------------------
 docs/source/conf.py                           |  35 ++--
 docs/source/data_source/data_sources.rst      |   4 +-
 docs/source/ocw/dataset_loader.rst            |   2 +-
 examples/draw_climatology_map_MISR_AOD.py     |  18 +-
 examples/esgf_integration_example.py          |   2 +-
 examples/knmi_to_cru31_full_bias.py           |  59 ++++---
 examples/model_ensemble_to_rcmed.py           |  51 +++---
 examples/multi_model_evaluation.py            |  97 +++++-----
 examples/multi_model_taylor_diagram.py        | 106 +++++------
 examples/simple_model_to_model_bias.py        |  14 +-
 examples/simple_model_tstd.py                 |   5 +-
 examples/subregions_portrait_diagram.py       | 115 ++++++------
 examples/subregions_rectangular_boundaries.py |  50 +++---
 examples/subset_TRMM_data_for_NCA_regions.py  |  32 ++--
 examples/taylor_diagram_example.py            |  32 ++--
 examples/time_series_with_regions.py          | 146 +++++++--------
 ez_setup.py                                   |  20 ++-
 mccsearch/code/mainProg.py                    |  67 ++++---
 mccsearch/code/mainProgTemplate.py            |  67 ++++---
 ocw/data_source/esgf.py                       |   3 +
 ocw/data_source/local.py                      | 141 ++++++++-------
 ocw/data_source/rcmed.py                      |  47 ++---
 ocw/dataset.py                                |  46 +++--
 ocw/dataset_processor.py                      |  73 ++++----
 ocw/esgf/download.py                          |  35 ++--
 ocw/esgf/logon.py                             |   2 +-
 ocw/esgf/main.py                              |  47 ++---
 ocw/esgf/search.py                            |  27 ++-
 ocw/evaluation.py                             |  65 ++++---
 ocw/metrics.py                                |  88 +++++----
 ocw/plotter.py                                | 196 ++++++++++++---------
 ocw/statistical_downscaling.py                |  41 +++--
 ocw/tests/test_dap.py                         |   1 +
 ocw/tests/test_dataset.py                     |  26 ++-
 ocw/tests/test_dataset_loader.py              |   7 +-
 ocw/tests/test_dataset_processor.py           |  10 ++
 ocw/tests/test_evaluation.py                  |   1 +
 ocw/tests/test_local.py                       |  10 +-
 ocw/tests/test_plotter.py                     |   3 +
 ocw/tests/test_rcmed.py                       |   1 +
 ocw/tests/test_utils.py                       |   7 +
 ocw/utils.py                                  | 106 ++++++-----
 setup.py                                      |  29 +--
 test_smoke.py                                 |   4 +-
 44 files changed, 1077 insertions(+), 861 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/docs/source/conf.py
----------------------------------------------------------------------
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 76f21fe..34852c4 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -11,7 +11,8 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import sys
+import os
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -25,7 +26,7 @@ sys.path.insert(0, os.path.abspath('../../ocw-ui/backend'))
 sys.path.insert(0, os.path.abspath('../../ocw-config-runner'))
 
 
-# -- General configuration -----------------------------------------------------
+# -- General configuration -----------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
 #needs_sphinx = '1.0'
@@ -102,7 +103,7 @@ pygments_style = 'sphinx'
 #keep_warnings = False
 
 
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output ---------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
@@ -182,24 +183,24 @@ html_logo = 'ocw-logo-variant-sm-01-01-new.png'
 htmlhelp_basename = 'ApacheOpenClimateWorkbenchdoc'
 
 
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output --------------------------------------------
 
 latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
 
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
 
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
 }
 
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'ApacheOpenClimateWorkbench.tex', u'Apache Open Climate Workbench Documentation',
-   u'Michael Joyce', 'manual'),
+    ('index', 'ApacheOpenClimateWorkbench.tex', u'Apache Open Climate Workbench Documentation',
+     u'Michael Joyce', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -223,7 +224,7 @@ latex_documents = [
 #latex_domain_indices = True
 
 
-# -- Options for manual page output --------------------------------------------
+# -- Options for manual page output --------------------------------------
 
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
@@ -236,15 +237,15 @@ man_pages = [
 #man_show_urls = False
 
 
-# -- Options for Texinfo output ------------------------------------------------
+# -- Options for Texinfo output ------------------------------------------
 
 # Grouping the document tree into Texinfo files. List of tuples
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-  ('index', 'ApacheOpenClimateWorkbench', u'Apache Open Climate Workbench Documentation',
-   u'Michael Joyce', 'ApacheOpenClimateWorkbench', 'One line description of project.',
-   'Miscellaneous'),
+    ('index', 'ApacheOpenClimateWorkbench', u'Apache Open Climate Workbench Documentation',
+     u'Michael Joyce', 'ApacheOpenClimateWorkbench', 'One line description of project.',
+     'Miscellaneous'),
 ]
 
 # Documents to append as an appendix to all manuals.

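Context for the CLIMATE-852 fix: the conf.py hunks shown here are only the style
cleanup around it; a Sphinx "Theme not found" error is usually resolved by guarding
the third-party theme import so the build falls back to a builtin theme. A minimal
sketch of that guard (sphinx_rtd_theme is an assumption for illustration, not taken
from this diff):

    # Sketch only: guard a third-party Sphinx theme so the docs still build
    # when the theme package is not installed.
    try:
        import sphinx_rtd_theme
        html_theme = 'sphinx_rtd_theme'
        html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
    except ImportError:
        html_theme = 'default'  # theme that ships with Sphinx
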
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/docs/source/data_source/data_sources.rst
----------------------------------------------------------------------
diff --git a/docs/source/data_source/data_sources.rst b/docs/source/data_source/data_sources.rst
index 4bb4f16..755091c 100644
--- a/docs/source/data_source/data_sources.rst
+++ b/docs/source/data_source/data_sources.rst
@@ -22,6 +22,6 @@ ESGF Module
     :members:
 
 PODAAC Module
-===========
-.. automodule:: podaac
+=============
+.. automodule:: podaac_datasource
     :members:

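The data_sources.rst hunk fixes two things at once: the automodule target is renamed
from podaac to podaac_datasource, and the section underline is lengthened.
reStructuredText requires a heading's underline to be at least as long as the heading
text; a short one triggers docutils' "Title underline too short" warning during the
Sphinx build, which is why the run of "=" now spans the full title:

    PODAAC Module
    =============
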
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/docs/source/ocw/dataset_loader.rst
----------------------------------------------------------------------
diff --git a/docs/source/ocw/dataset_loader.rst b/docs/source/ocw/dataset_loader.rst
index 833b7f9..cf48d46 100644
--- a/docs/source/ocw/dataset_loader.rst
+++ b/docs/source/ocw/dataset_loader.rst
@@ -1,5 +1,5 @@
 Dataset Loader Module
-**************
+*********************
 
 .. automodule:: dataset_loader
     :members:

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/draw_climatology_map_MISR_AOD.py
----------------------------------------------------------------------
diff --git a/examples/draw_climatology_map_MISR_AOD.py b/examples/draw_climatology_map_MISR_AOD.py
index d243641..979c0f5 100644
--- a/examples/draw_climatology_map_MISR_AOD.py
+++ b/examples/draw_climatology_map_MISR_AOD.py
@@ -26,7 +26,7 @@ import numpy.ma as ma
 
 ''' data source: https://dx.doi.org/10.6084/m9.figshare.3753321.v1
     AOD_monthly_2000-Mar_2016-FEB_from_MISR_L3_JOINT.nc is publicly available.'''
-dataset = local.load_file('AOD_monthly_2000-MAR_2016-FEB_from_MISR_L3_JOINT.nc', 
+dataset = local.load_file('AOD_monthly_2000-MAR_2016-FEB_from_MISR_L3_JOINT.nc',
                           'nonabsorbing_ave')
 ''' Subset the data for East Asia'''
 Bounds = ds.Bounds(lat_min=20, lat_max=57.7, lon_min=90, lon_max=150)
@@ -34,19 +34,21 @@ dataset = dsp.subset(dataset, Bounds)
 
 '''The original dataset includes nonabsorbing AOD values between March 2000 and February 2015. 
 dsp.temporal_subset will extract data in September-October-November.'''
-dataset_SON = dsp.temporal_subset(dataset, month_start=9, month_end=11, average_each_year=True)
+dataset_SON = dsp.temporal_subset(
+    dataset, month_start=9, month_end=11, average_each_year=True)
 
 ny, nx = dataset_SON.values.shape[1:]
 
 # multi-year mean aod
 clim_aod = ma.zeros([3, ny, nx])
 
-clim_aod[0,:] = ma.mean(dataset_SON.values, axis=0) # 16-year mean
-clim_aod[1,:] = ma.mean(dataset_SON.values[-5:,:], axis=0) # the last 5-year mean
-clim_aod[2,:] = dataset_SON.values[-1,:] # the last year's value
+clim_aod[0, :] = ma.mean(dataset_SON.values, axis=0)  # 16-year mean
+clim_aod[1, :] = ma.mean(dataset_SON.values[-5:, :],
+                         axis=0)  # the last 5-year mean
+clim_aod[2, :] = dataset_SON.values[-1, :]  # the last year's value
 
 # plot clim_aod (3 subplots)
-plotter.draw_contour_map(clim_aod, dataset_SON.lats, dataset_SON.lons, 
+plotter.draw_contour_map(clim_aod, dataset_SON.lats, dataset_SON.lons,
                          fname='nonabsorbing_AOD_clim_East_Asia_Sep-Nov',
-                         gridshape=[1,3],subtitles=['2000-2015: 16 years','2011-2015: 5 years', '2015: 1 year'], 
-                         clevs=np.arange(21)*0.02)
+                         gridshape=[1, 3], subtitles=['2000-2015: 16 years', '2011-2015: 5 years', '2015: 1 year'],
+                         clevs=np.arange(21) * 0.02)

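From here on, most hunks in this commit are mechanical PEP 8 cleanup: operator and
comma spacing, four-space indents, and wrapping of long call lines exactly as an
automatic formatter produces them. A sketch of reproducing this pass with autopep8,
which is an assumption on my part; the commit does not say how the changes were
generated:

    # Hypothetical reproduction of this commit's style pass; autopep8 is an
    # assumption, not named anywhere in the commit.
    import autopep8

    with open('examples/draw_climatology_map_MISR_AOD.py') as src:
        print(autopep8.fix_code(src.read(), options={'aggressive': 1}))
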
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/esgf_integration_example.py
----------------------------------------------------------------------
diff --git a/examples/esgf_integration_example.py b/examples/esgf_integration_example.py
index 8914c8e..7a02632 100644
--- a/examples/esgf_integration_example.py
+++ b/examples/esgf_integration_example.py
@@ -20,7 +20,7 @@ from getpass import getpass
 import ssl
 
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 dataset_id = 'obs4MIPs.CNES.AVISO.zos.mon.v20110829|esgf-data.jpl.nasa.gov'
 variable = 'zosStderr'

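The ssl hunk above merely re-indents a monkey-patch that disables HTTPS certificate
verification process-wide so the ESGF downloads succeed. If only a single request
needs the relaxed check, a more targeted sketch would pass an explicit context to
that one call (the urlopen usage and URL below are illustrative assumptions, not
part of this commit):

    # Sketch only: scope the unverified context to one request instead of
    # patching ssl globally.
    import ssl
    from urllib2 import urlopen  # urllib.request.urlopen on Python 3

    ctx = ssl._create_unverified_context()
    response = urlopen('https://esgf-data.example/some/file', context=ctx)
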
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/knmi_to_cru31_full_bias.py
----------------------------------------------------------------------
diff --git a/examples/knmi_to_cru31_full_bias.py b/examples/knmi_to_cru31_full_bias.py
index dd70341..95b64a8 100644
--- a/examples/knmi_to_cru31_full_bias.py
+++ b/examples/knmi_to_cru31_full_bias.py
@@ -31,13 +31,13 @@ import ocw.plotter as plotter
 import ssl
 
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
 # This way we can easily adjust the time span of the retrievals
 YEARS = 3
-# Two Local Model Files 
+# Two Local Model Files
 MODEL = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
 # Filename for the output image/plot (without file extension)
 OUTPUT_PLOT = "cru_31_tmax_knmi_africa_bias_full"
@@ -51,7 +51,8 @@ else:
 """ Step 1: Load Local NetCDF File into OCW Dataset Objects """
 print("Loading %s into an OCW Dataset Object" % (MODEL,))
 knmi_dataset = local.load_file(MODEL, "tasmax")
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" %
+      (knmi_dataset.values.shape,))
 
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
@@ -89,11 +90,12 @@ start_time = max([cru_start, knmi_start])
 # Grab the Min End Time
 end_time = min([cru_end, knmi_end])
 print("Overlap computed to be: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
+                                            end_time.strftime("%Y-%m-%d")))
 print("We are going to grab the first %s year(s) of data" % YEARS)
-end_time = datetime.datetime(start_time.year + YEARS, start_time.month, start_time.day)
+end_time = datetime.datetime(
+    start_time.year + YEARS, start_time.month, start_time.day)
 print("Final Overlap is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
+                                      end_time.strftime("%Y-%m-%d")))
 
 print("Fetching data from RCMED...")
 cru31_dataset = rcmed.parameter_dataset(dataset_id,
@@ -106,30 +108,35 @@ cru31_dataset = rcmed.parameter_dataset(dataset_id,
                                         end_time)
 
 """ Step 3: Resample Datasets so they are the same shape """
-print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s" % (knmi_dataset.values.shape,))
+print("CRU31_Dataset.values shape: (times, lats, lons) - %s" %
+      (cru31_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s" %
+      (knmi_dataset.values.shape,))
 print("Our two datasets have a mis-match in time. We will subset on time to %s years\n" % YEARS)
 
 # Create a Bounds object to use for subsetting
-new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon, lon_max=max_lon, start=start_time, end=end_time)
+new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon,
+                    lon_max=max_lon, start=start_time, end=end_time)
 knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
 
-print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
+print("CRU31_Dataset.values shape: (times, lats, lons) - %s" %
+      (cru31_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" %
+      (knmi_dataset.values.shape,))
 
 print("Temporally Rebinning the Datasets to a Single Timestep")
-# To run FULL temporal Rebinning 
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution = 'full')
-cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution = 'full')
+# To run FULL temporal Rebinning
+knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution='full')
+cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution='full')
 
 print("KNMI_Dataset.values shape: %s" % (knmi_dataset.values.shape,))
 print("CRU31_Dataset.values shape: %s \n\n" % (cru31_dataset.values.shape,))
- 
+
 """ Spatially Regrid the Dataset Objects to a 1/2 degree grid """
 # Using the bounds we will create a new set of lats and lons on 0.5 degree step
 new_lons = np.arange(min_lon, max_lon, 0.5)
 new_lats = np.arange(min_lat, max_lat, 0.5)
- 
+
 # Spatially regrid datasets using the new_lats, new_lons numpy arrays
 print("Spatially Regridding the KNMI_Dataset...")
 knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
@@ -137,7 +144,7 @@ print("Spatially Regridding the CRU31_Dataset...")
 cru31_dataset = dsp.spatial_regrid(cru31_dataset, new_lats, new_lons)
 print("Final shape of the KNMI_Dataset:%s" % (knmi_dataset.values.shape, ))
 print("Final shape of the CRU31_Dataset:%s" % (cru31_dataset.values.shape, ))
- 
+
 """ Step 4:  Build a Metric to use for Evaluation - Bias for this example """
 # You can build your own metrics, but OCW also ships with some common metrics
 print("Setting up a Bias metric to use for evaluation")
@@ -152,7 +159,7 @@ print("Making the Evaluation definition")
 bias_evaluation = evaluation.Evaluation(knmi_dataset, [cru31_dataset], [bias])
 print("Executing the Evaluation using the object's run() method")
 bias_evaluation.run()
- 
+
 """ Step 6: Make a Plot from the Evaluation.results """
 # The Evaluation.results are a set of nested lists to support many different
 # possible Evaluation scenarios.
@@ -162,18 +169,20 @@ bias_evaluation.run()
 # Accessing the actual results when we have used 1 metric and 1 dataset is
 # done this way:
 print("Accessing the Results of the Evaluation run")
-results = bias_evaluation.results[0][0,:]
- 
+results = bias_evaluation.results[0][0, :]
+
 # From the bias output I want to make a Contour Map of the region
 print("Generating a contour map using ocw.plotter.draw_contour_map()")
- 
+
 lats = new_lats
 lons = new_lons
 fname = OUTPUT_PLOT
-gridshape = (1, 1)  # Using a 1 x 1 since we have a single Bias for the full time range
-plot_title = "TASMAX Bias of KNMI Compared to CRU 3.1 (%s - %s)" % (start_time.strftime("%Y/%d/%m"), end_time.strftime("%Y/%d/%m"))
+# Using a 1 x 1 since we have a single Bias for the full time range
+gridshape = (1, 1)
+plot_title = "TASMAX Bias of KNMI Compared to CRU 3.1 (%s - %s)" % (
+    start_time.strftime("%Y/%d/%m"), end_time.strftime("%Y/%d/%m"))
 sub_titles = ["Full Temporal Range"]
- 
+
 plotter.draw_contour_map(results, lats, lons, fname,
-                         gridshape=gridshape, ptitle=plot_title, 
+                         gridshape=gridshape, ptitle=plot_title,
                          subtitles=sub_titles)

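One pre-existing quirk the reformat preserves: the title pattern
strftime("%Y/%d/%m") renders year/day/month, and the same pattern recurs in
model_ensemble_to_rcmed.py below. If year/month/day ordering was intended, the
spelling would be (observation only; nothing in this commit changes it):

    # Sketch: year/month/day ordering, if that was the intent.
    start_time.strftime("%Y/%m/%d")
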
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/model_ensemble_to_rcmed.py
----------------------------------------------------------------------
diff --git a/examples/model_ensemble_to_rcmed.py b/examples/model_ensemble_to_rcmed.py
index 7b82197..e8e3dbe 100644
--- a/examples/model_ensemble_to_rcmed.py
+++ b/examples/model_ensemble_to_rcmed.py
@@ -32,13 +32,13 @@ import ocw.plotter as plotter
 import ssl
 
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
 # This way we can easily adjust the time span of the retrievals
 YEARS = 1
-# Two Local Model Files 
+# Two Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
 FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
 # Filename for the output image/plot (without file extension)
@@ -65,7 +65,6 @@ wrf311_dataset = local.load_file(FILE_2, "tasmax")
 wrf311_dataset.name = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax"
 
 
-
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
 metadata = rcmed.get_parameters_metadata()
@@ -88,7 +87,7 @@ parameter_id = int(cru_31['parameter_id'])
 min_lat, max_lat, min_lon, max_lon = wrf311_dataset.spatial_boundaries()
 
 #  There is a boundry alignment issue with the datasets.  To mitigate this
-#  we will use the math.floor() and math.ceil() functions to shrink the 
+#  we will use the math.floor() and math.ceil() functions to shrink the
 #  boundries slighty.
 min_lat = math.ceil(min_lat)
 max_lat = math.floor(max_lat)
@@ -101,11 +100,11 @@ cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
 cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
 knmi_start, knmi_end = knmi_dataset.temporal_boundaries()
 # Set the Time Range to be the year 1989
-start_time = datetime.datetime(1989,1,1)
-end_time = datetime.datetime(1989,12,1)
+start_time = datetime.datetime(1989, 1, 1)
+end_time = datetime.datetime(1989, 12, 1)
 
 print("Time Range is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
+                                   end_time.strftime("%Y-%m-%d")))
 
 print("Fetching data from RCMED...")
 cru31_dataset = rcmed.parameter_dataset(dataset_id,
@@ -121,14 +120,16 @@ cru31_dataset = rcmed.parameter_dataset(dataset_id,
 
 print("Temporally Rebinning the Datasets to an Annual Timestep")
 # To run annual temporal Rebinning,
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution = 'annual')
-wrf311_dataset = dsp.temporal_rebin(wrf311_dataset, temporal_resolution = 'annual')
-cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution = 'annual')
+knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution='annual')
+wrf311_dataset = dsp.temporal_rebin(
+    wrf311_dataset, temporal_resolution='annual')
+cru31_dataset = dsp.temporal_rebin(cru31_dataset, temporal_resolution='annual')
 
-# Running Temporal Rebin early helps negate the issue of datasets being on different 
+# Running Temporal Rebin early helps negate the issue of datasets being on different
 # days of the month (1st vs. 15th)
 # Create a Bounds object to use for subsetting
-new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon, lon_max=max_lon, start=start_time, end=end_time)
+new_bounds = Bounds(lat_min=min_lat, lat_max=max_lat, lon_min=min_lon,
+                    lon_max=max_lon, start=start_time, end=end_time)
 
 # Subset our model datasets so they are the same size
 knmi_dataset = dsp.subset(knmi_dataset, new_bounds)
@@ -138,7 +139,7 @@ wrf311_dataset = dsp.subset(wrf311_dataset, new_bounds)
 # Using the bounds we will create a new set of lats and lons on 1/2 degree step
 new_lons = np.arange(min_lon, max_lon, 0.5)
 new_lats = np.arange(min_lat, max_lat, 0.5)
- 
+
 # Spatially regrid datasets using the new_lats, new_lons numpy arrays
 knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
 wrf311_dataset = dsp.spatial_regrid(wrf311_dataset, new_lats, new_lons)
@@ -157,12 +158,13 @@ bias = metrics.Bias()
 # Evaluation can take in multiple targets and metrics, so we need to convert
 # our examples into Python lists.  Evaluation will iterate over the lists
 print("Making the Evaluation definition")
-bias_evaluation = evaluation.Evaluation(cru31_dataset, 
-                      [knmi_dataset, wrf311_dataset, ensemble_dataset],
-                      [bias])
+bias_evaluation = evaluation.Evaluation(cru31_dataset,
+                                        [knmi_dataset, wrf311_dataset,
+                                            ensemble_dataset],
+                                        [bias])
 print("Executing the Evaluation using the object's run() method")
 bias_evaluation.run()
- 
+
 """ Step 6: Make a Plot from the Evaluation.results """
 # The Evaluation.results are a set of nested lists to support many different
 # possible Evaluation scenarios.
@@ -173,18 +175,19 @@ bias_evaluation.run()
 # done this way:
 print("Accessing the Results of the Evaluation run")
 results = bias_evaluation.results[0]
- 
+
 # From the bias output I want to make a Contour Map of the region
 print("Generating a contour map using ocw.plotter.draw_contour_map()")
- 
+
 lats = new_lats
 lons = new_lons
 fname = OUTPUT_PLOT
 gridshape = (3, 1)  # Using a 3 x 1 since we have a 1 year of data for 3 models
 plotnames = ["KNMI", "WRF311", "ENSEMBLE"]
 for i in np.arange(3):
-  plot_title = "TASMAX Bias of CRU 3.1 vs. %s (%s - %s)" % (plotnames[i], start_time.strftime("%Y/%d/%m"), end_time.strftime("%Y/%d/%m"))
-  output_file = "%s_%s" % (fname, plotnames[i].lower())
-  print "creating %s" % (output_file,)
-  plotter.draw_contour_map(results[i,:], lats, lons, output_file,
-                         gridshape=gridshape, ptitle=plot_title)
+    plot_title = "TASMAX Bias of CRU 3.1 vs. %s (%s - %s)" % (
+        plotnames[i], start_time.strftime("%Y/%d/%m"), end_time.strftime("%Y/%d/%m"))
+    output_file = "%s_%s" % (fname, plotnames[i].lower())
+    print "creating %s" % (output_file,)
+    plotter.draw_contour_map(results[i, :], lats, lons, output_file,
+                             gridshape=gridshape, ptitle=plot_title)

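Worth noting: the loop above was re-indented but still uses a Python 2 print
statement (print "creating %s" % ...), so this example remains Python 2 only even
after the cleanup. The two-and-three compatible spelling would be:

    # Sketch only; not part of this commit.
    from __future__ import print_function
    print("creating %s" % (output_file,))
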
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/multi_model_evaluation.py
----------------------------------------------------------------------
diff --git a/examples/multi_model_evaluation.py b/examples/multi_model_evaluation.py
index 97c96d9..7756cc9 100644
--- a/examples/multi_model_evaluation.py
+++ b/examples/multi_model_evaluation.py
@@ -21,7 +21,7 @@ import numpy as np
 from os import path
 
 
-#import Apache OCW dependences
+# import Apache OCW dependences
 import ocw.data_source.local as local
 import ocw.data_source.rcmed as rcmed
 from ocw.dataset import Bounds as Bounds
@@ -32,34 +32,35 @@ import ocw.plotter as plotter
 import ocw.utils as utils
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-  
+    ssl._create_default_https_context = ssl._create_unverified_context
+
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
+# Three Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 FILE_2 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 # Filename for the output image/plot (without file extension)
 OUTPUT_PLOT = "pr_africa_bias_annual"
-#variable that we are analyzing
-varName = 'pr' 
+# variable that we are analyzing
+varName = 'pr'
 # Spatial and temporal configurations
-LAT_MIN = -45.0 
+LAT_MIN = -45.0
 LAT_MAX = 42.24
 LON_MIN = -24.0
-LON_MAX = 60.0 
+LON_MAX = 60.0
 START = datetime.datetime(2000, 1, 1)
 END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX, lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
+EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX,
+                     lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
 
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
+# regridding parameters
+gridLonStep = 0.5
+gridLatStep = 0.5
 
-#list for all target_datasets
-target_datasets =[]
-#list for names for all the datasets
-allNames =[]
+# list for all target_datasets
+target_datasets = []
+# list for names for all the datasets
+allNames = []
 
 
 # Download necessary NetCDF file if not present
@@ -81,9 +82,10 @@ target_datasets.append(local.load_file(FILE_2, varName, name="UCT"))
 
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+# the dataset_id and the parameter id were determined from
+# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
+CRU31 = rcmed.parameter_dataset(
+    10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
 """ Step 3: Resample Datasets so they are the same shape """
 print("Resampling datasets")
@@ -91,52 +93,59 @@ CRU31 = dsp.water_flux_unit_conversion(CRU31)
 CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
 
 for member, each_target_dataset in enumerate(target_datasets):
-  target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
-  target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-  target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30))    
-    
+    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
+    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
+                                                             member])
+    target_datasets[member] = dsp.temporal_rebin(
+        target_datasets[member], datetime.timedelta(days=30))
+
 
 """ Spatially Regrid the Dataset Objects to a user defined  grid """
-# Using the bounds we will create a new set of lats and lons 
+# Using the bounds we will create a new set of lats and lons
 print("Regridding datasets")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
 CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
 
 for member, each_target_dataset in enumerate(target_datasets):
-  target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
+    target_datasets[member] = dsp.spatial_regrid(
+        target_datasets[member], new_lats, new_lons)
 
-#make the model ensemble
+# make the model ensemble
 target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
+target_datasets_ensemble.name = "ENS"
 
-#append to the target_datasets for final analysis
+# append to the target_datasets for final analysis
 target_datasets.append(target_datasets_ensemble)
 
-#find the mean value
-#way to get the mean. Note the function exists in util.py 
+# find the mean value
+# way to get the mean. Note the function exists in util.py
 _, CRU31.values = utils.calc_climatology_year(CRU31)
 
 for member, each_target_dataset in enumerate(target_datasets):
-  _,target_datasets[member].values = utils.calc_climatology_year(target_datasets[member])
+    _, target_datasets[member].values = utils.calc_climatology_year(target_datasets[
+                                                                    member])
 
 for target in target_datasets:
-  allNames.append(target.name)
+    allNames.append(target.name)
 
-#determine the metrics
+# determine the metrics
 mean_bias = metrics.Bias()
 
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # list of target datasets for the evaluation
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [mean_bias])   
+# create the Evaluation object
+RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31,  # Reference dataset for the evaluation
+                                               # list of target datasets for
+                                               # the evaluation
+                                               target_datasets,
+                                               # 1 or more metrics to use in
+                                               # the evaluation
+                                               [mean_bias])
 RCMs_to_CRU_evaluation.run()
 
-#extract the relevant data from RCMs_to_CRU_evaluation.results 
-#the results returns a list (num_target_datasets, num_metrics). See docs for further details
-#remove the metric dimension
-rcm_bias = RCMs_to_CRU_evaluation.results[0] 
+# extract the relevant data from RCMs_to_CRU_evaluation.results
+# the results returns a list (num_target_datasets, num_metrics). See docs for further details
+# remove the metric dimension
+rcm_bias = RCMs_to_CRU_evaluation.results[0]
 
-plotter.draw_contour_map(rcm_bias, new_lats, new_lons, gridshape=(2, 3),fname=OUTPUT_PLOT, subtitles=allNames, cmap='coolwarm_r')
+plotter.draw_contour_map(rcm_bias, new_lats, new_lons, gridshape=(
+    2, 3), fname=OUTPUT_PLOT, subtitles=allNames, cmap='coolwarm_r')

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/multi_model_taylor_diagram.py
----------------------------------------------------------------------
diff --git a/examples/multi_model_taylor_diagram.py b/examples/multi_model_taylor_diagram.py
index b7daf61..31d4020 100644
--- a/examples/multi_model_taylor_diagram.py
+++ b/examples/multi_model_taylor_diagram.py
@@ -1,4 +1,4 @@
-#Apache OCW lib immports
+# Apache OCW lib immports
 from ocw.dataset import Dataset, Bounds
 import ocw.data_source.local as local
 import ocw.data_source.rcmed as rcmed
@@ -15,12 +15,12 @@ import urllib
 from os import path
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
+# Three Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
 FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
@@ -28,25 +28,26 @@ FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 OUTPUT_PLOT = "pr_africa_taylor"
 
 # Spatial and temporal configurations
-LAT_MIN = -45.0 
+LAT_MIN = -45.0
 LAT_MAX = 42.24
 LON_MIN = -24.0
-LON_MAX = 60.0 
+LON_MAX = 60.0
 START = datetime.datetime(2000, 01, 1)
 END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX, lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
+EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX,
+                     lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
 
-#variable that we are analyzing
-varName = 'pr' 
+# variable that we are analyzing
+varName = 'pr'
 
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
+# regridding parameters
+gridLonStep = 0.5
+gridLatStep = 0.5
 
-#some vars for this evaluation
-target_datasets_ensemble=[]
-target_datasets =[]
-ref_datasets =[]
+# some vars for this evaluation
+target_datasets_ensemble = []
+target_datasets = []
+ref_datasets = []
 
 # Download necessary NetCDF file if not present
 if path.exists(FILE_1):
@@ -72,70 +73,75 @@ target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
 
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+# the dataset_id and the parameter id were determined from
+# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
+CRU31 = rcmed.parameter_dataset(
+    10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
 """ Step 3: Resample Datasets so they are the same shape """
 print("Resampling datasets ...")
 print("... on units")
 CRU31 = dsp.water_flux_unit_conversion(CRU31)
 print("... temporal")
-CRU31 = dsp.temporal_rebin(CRU31, temporal_resolution = 'monthly')
+CRU31 = dsp.temporal_rebin(CRU31, temporal_resolution='monthly')
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.temporal_rebin(target_datasets[member], temporal_resolution = 'monthly') 
-	target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
-	
-#Regrid
+    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
+                                                             member])
+    target_datasets[member] = dsp.temporal_rebin(
+        target_datasets[member], temporal_resolution='monthly')
+    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
+
+# Regrid
 print("... regrid")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
 CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-	
-#find the mean values
-#way to get the mean. Note the function exists in util.py as def calc_climatology_year(dataset):
+    target_datasets[member] = dsp.spatial_regrid(
+        target_datasets[member], new_lats, new_lons)
+
+# find the mean values
+# way to get the mean. Note the function exists in util.py as def
+# calc_climatology_year(dataset):
 CRU31.values = utils.calc_temporal_mean(CRU31)
 
-#make the model ensemble
+# make the model ensemble
 target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
+target_datasets_ensemble.name = "ENS"
 
-#append to the target_datasets for final analysis
+# append to the target_datasets for final analysis
 target_datasets.append(target_datasets_ensemble)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member].values = utils.calc_temporal_mean(target_datasets[member])
-	
-allNames =[]
+    target_datasets[member].values = utils.calc_temporal_mean(target_datasets[
+                                                              member])
+
+allNames = []
 
 for target in target_datasets:
-	allNames.append(target.name)
+    allNames.append(target.name)
 
-#calculate the metrics
+# calculate the metrics
 taylor_diagram = metrics.SpatialPatternTaylorDiagram()
 
 
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # 1 or more target datasets for the evaluation                
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [taylor_diagram])#, mean_bias,spatial_std_dev_ratio, pattern_correlation])   
+# create the Evaluation object
+RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31,  # Reference dataset for the evaluation
+                                               # 1 or more target datasets for
+                                               # the evaluation
+                                               target_datasets,
+                                               # 1 or more metrics to use in
+                                               # the evaluation
+                                               [taylor_diagram])  # , mean_bias,spatial_std_dev_ratio, pattern_correlation])
 RCMs_to_CRU_evaluation.run()
 
 taylor_data = RCMs_to_CRU_evaluation.results[0]
 
 plotter.draw_taylor_diagram(taylor_data,
-                        allNames, 
-                        "CRU31",
-                        fname=OUTPUT_PLOT,
-                        fmt='png',
-                        frameon=False)
-
-                              
-
+                            allNames,
+                            "CRU31",
+                            fname=OUTPUT_PLOT,
+                            fmt='png',
+                            frameon=False)

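Another Python 2 remnant this formatting pass leaves in place:
datetime.datetime(2000, 01, 1) uses a leading-zero integer literal, which is a
syntax error on Python 3, and the xrange call in the portrait-diagram example below
fails there too (NameError). The portable form of the date is:

    # Sketch only; not part of this commit.
    import datetime
    START = datetime.datetime(2000, 1, 1)
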
http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/simple_model_to_model_bias.py
----------------------------------------------------------------------
diff --git a/examples/simple_model_to_model_bias.py b/examples/simple_model_to_model_bias.py
index 44d482b..ffa5cda 100644
--- a/examples/simple_model_to_model_bias.py
+++ b/examples/simple_model_to_model_bias.py
@@ -29,7 +29,7 @@ import ocw.plotter as plotter
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Two Local Model Files 
+# Two Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
 FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
 # Filename for the output image/plot (without file extension)
@@ -46,11 +46,13 @@ if not path.exists(FILE_2_PATH):
 """ Step 1: Load Local NetCDF Files into OCW Dataset Objects """
 print("Loading %s into an OCW Dataset Object" % (FILE_1_PATH,))
 knmi_dataset = local.load_file(FILE_1_PATH, "tasmax")
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" %
+      (knmi_dataset.values.shape,))
 
 print("Loading %s into an OCW Dataset Object" % (FILE_2_PATH,))
 wrf_dataset = local.load_file(FILE_2_PATH, "tasmax")
-print("WRF_Dataset.values shape: (times, lats, lons) - %s \n" % (wrf_dataset.values.shape,))
+print("WRF_Dataset.values shape: (times, lats, lons) - %s \n" %
+      (wrf_dataset.values.shape,))
 
 """ Step 2: Temporally Rebin the Data into an Annual Timestep """
 print("Temporally Rebinning the Datasets to an Annual Timestep")
@@ -115,10 +117,10 @@ print("Generating a contour map using ocw.plotter.draw_contour_map()")
 lats = new_lats
 lons = new_lons
 fname = OUTPUT_PLOT
-gridshape = (4, 5) # 20 Years worth of plots. 20 rows in 1 column
+gridshape = (4, 5)  # 20 Years worth of plots. 20 rows in 1 column
 plot_title = "TASMAX Bias of WRF Compared to KNMI (1989 - 2008)"
 sub_titles = range(1989, 2009, 1)
 
-plotter.draw_contour_map(results, lats, lons, fname, 
-                         gridshape=gridshape, ptitle=plot_title, 
+plotter.draw_contour_map(results, lats, lons, fname,
+                         gridshape=gridshape, ptitle=plot_title,
                          subtitles=sub_titles)

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/simple_model_tstd.py
----------------------------------------------------------------------
diff --git a/examples/simple_model_tstd.py b/examples/simple_model_tstd.py
index 4c87813..79c19d2 100644
--- a/examples/simple_model_tstd.py
+++ b/examples/simple_model_tstd.py
@@ -59,7 +59,8 @@ std = metrics.TemporalStdDev()
 # Evaluation can take in multiple targets and metrics, so we need to convert
 # our examples into Python lists.  Evaluation will iterate over the lists
 print "Making the Evaluation definition"
-# Temporal STD Metric gets one target dataset then reference dataset should be None
+# Temporal STD Metric gets one target dataset then reference dataset
+# should be None
 std_evaluation = evaluation.Evaluation(None, [knmi_dataset], [std])
 print "Executing the Evaluation using the object's run() method"
 std_evaluation.run()
@@ -80,7 +81,7 @@ print "The results are of type: %s" % type(results)
 print "Generating a contour map using ocw.plotter.draw_contour_map()"
 
 fname = OUTPUT_PLOT
-gridshape = (4, 5) # 20 Years worth of plots. 20 rows in 1 column
+gridshape = (4, 5)  # 20 Years worth of plots. 20 rows in 1 column
 plot_title = "TASMAX Temporal Standard Deviation (1989 - 2008)"
 sub_titles = range(1989, 2009, 1)
 

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/subregions_portrait_diagram.py
----------------------------------------------------------------------
diff --git a/examples/subregions_portrait_diagram.py b/examples/subregions_portrait_diagram.py
index 525cb26..e85286f 100644
--- a/examples/subregions_portrait_diagram.py
+++ b/examples/subregions_portrait_diagram.py
@@ -1,4 +1,4 @@
-#Apache OCW lib immports
+# Apache OCW lib immports
 from ocw.dataset import Dataset, Bounds
 import ocw.data_source.local as local
 import ocw.data_source.rcmed as rcmed
@@ -16,11 +16,11 @@ from os import path
 import urllib
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
+# Three Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
 FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
@@ -28,22 +28,22 @@ FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 OUTPUT_PLOT = "portrait_diagram"
 
 # Spatial and temporal configurations
-LAT_MIN = -45.0 
+LAT_MIN = -45.0
 LAT_MAX = 42.24
 LON_MIN = -24.0
-LON_MAX = 60.0 
+LON_MAX = 60.0
 START = datetime.datetime(2000, 01, 1)
 END = datetime.datetime(2007, 12, 31)
 EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
-#variable that we are analyzing
-varName = 'pr' 
+# variable that we are analyzing
+varName = 'pr'
 
-#regridding parameters
+# regridding parameters
 gridLonStep = 0.5
 gridLatStep = 0.5
 
-#some vars for this evaluation
+# some vars for this evaluation
 target_datasets_ensemble = []
 target_datasets = []
 allNames = []
@@ -65,9 +65,10 @@ target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
 
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+# the dataset_id and the parameter id were determined from
+# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
+CRU31 = rcmed.parameter_dataset(
+    10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
 """ Step 3: Processing Datasets so they are the same shape """
 print("Processing datasets ...")
@@ -76,67 +77,73 @@ print("... on units")
 CRU31 = dsp.water_flux_unit_conversion(CRU31)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
-	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly') 		
-		
+    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
+    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
+                                                             member])
+    target_datasets[member] = dsp.normalize_dataset_datetimes(
+        target_datasets[member], 'monthly')
+
 print("... spatial regridding")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
 CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-	
-#find the total annual mean. Note the function exists in util.py as def calc_climatology_year(dataset):
-_,CRU31.values = utils.calc_climatology_year(CRU31)
+    target_datasets[member] = dsp.spatial_regrid(
+        target_datasets[member], new_lats, new_lons)
+
+# find the total annual mean. Note the function exists in util.py as def
+# calc_climatology_year(dataset):
+_, CRU31.values = utils.calc_climatology_year(CRU31)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	_, target_datasets[member].values = utils.calc_climatology_year(target_datasets[member])
+    _, target_datasets[member].values = utils.calc_climatology_year(target_datasets[
+                                                                    member])
 
-#make the model ensemble
+# make the model ensemble
 target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
+target_datasets_ensemble.name = "ENS"
 
-#append to the target_datasets for final analysis
+# append to the target_datasets for final analysis
 target_datasets.append(target_datasets_ensemble)
 
 for target in target_datasets:
-	allNames.append(target.name)
+    allNames.append(target.name)
 
 list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5), 
- Bounds(0.0, 10.0,  29.0, 37.5), 
- Bounds(10.0, 20.0, 25.0, 32.5), 
- Bounds(20.0, 33.0, 25.0, 32.5), 
- Bounds(-19.3,-10.2,12.0, 20.0), 
- Bounds( 15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0), 
- Bounds(-10.9, 10.0, 5.0, 7.3),  
- Bounds(33.9, 40.0,  6.9, 15.0), 
- Bounds(10.0, 25.0,  0.0, 10.0), 
- Bounds(10.0, 25.0,-10.0,  0.0), 
- Bounds(30.0, 40.0,-15.0,  0.0), 
- Bounds(33.0, 40.0, 25.0, 35.00)]
-
-region_list=["R"+str(i+1) for i in xrange(13)]
-
-#metrics
+    Bounds(-10.0, 0.0, 29.0, 36.5),
+    Bounds(0.0, 10.0,  29.0, 37.5),
+    Bounds(10.0, 20.0, 25.0, 32.5),
+    Bounds(20.0, 33.0, 25.0, 32.5),
+    Bounds(-19.3, -10.2, 12.0, 20.0),
+    Bounds(15.0, 30.0, 15.0, 25.0),
+    Bounds(-10.0, 10.0, 7.3, 15.0),
+    Bounds(-10.9, 10.0, 5.0, 7.3),
+    Bounds(33.9, 40.0,  6.9, 15.0),
+    Bounds(10.0, 25.0,  0.0, 10.0),
+    Bounds(10.0, 25.0, -10.0,  0.0),
+    Bounds(30.0, 40.0, -15.0,  0.0),
+    Bounds(33.0, 40.0, 25.0, 35.00)]
+
+region_list = ["R" + str(i + 1) for i in xrange(13)]
+
+# metrics
 pattern_correlation = metrics.PatternCorrelation()
 
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # 1 or more target datasets for the evaluation
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [pattern_correlation], 
-                                    # list of subregion Bounds Objects
-                                    list_of_regions)   
+# create the Evaluation object
+RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31,  # Reference dataset for the evaluation
+                                               # 1 or more target datasets for
+                                               # the evaluation
+                                               target_datasets,
+                                               # 1 or more metrics to use in
+                                               # the evaluation
+                                               [pattern_correlation],
+                                               # list of subregion Bounds
+                                               # Objects
+                                               list_of_regions)
 RCMs_to_CRU_evaluation.run()
 
 new_patcor = np.squeeze(np.array(RCMs_to_CRU_evaluation.results), axis=1)
 
-plotter.draw_portrait_diagram(np.transpose(new_patcor),allNames, region_list, fname=OUTPUT_PLOT, fmt='png', cmap='coolwarm_r')
-
-                              
-
+plotter.draw_portrait_diagram(np.transpose(
+    new_patcor), allNames, region_list, fname=OUTPUT_PLOT, fmt='png', cmap='coolwarm_r')

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/subregions_rectangular_boundaries.py
----------------------------------------------------------------------
diff --git a/examples/subregions_rectangular_boundaries.py b/examples/subregions_rectangular_boundaries.py
index 20aaee9..7fcf0e8 100644
--- a/examples/subregions_rectangular_boundaries.py
+++ b/examples/subregions_rectangular_boundaries.py
@@ -1,4 +1,4 @@
-#Apache OCW lib immports
+# Apache OCW lib immports
 from ocw.dataset import Dataset, Bounds
 import ocw.data_source.local as local
 import ocw.data_source.rcmed as rcmed
@@ -15,39 +15,37 @@ import numpy.ma as ma
 OUTPUT_PLOT = "subregions"
 
 # Spatial and temporal configurations
-LAT_MIN = -45.0 
+LAT_MIN = -45.0
 LAT_MAX = 42.24
 LON_MIN = -24.0
-LON_MAX = 60.0 
+LON_MAX = 60.0
 START_SUB = datetime.datetime(2000, 01, 1)
 END_SUB = datetime.datetime(2007, 12, 31)
 
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
+# regridding parameters
+gridLonStep = 0.5
+gridLatStep = 0.5
 
-#Regrid
+# Regrid
 print("... regrid")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
 
 list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB), 
- Bounds(0.0, 10.0,  29.0, 37.5, START_SUB, END_SUB),
- Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(-19.3,-10.2,12.0, 20.0, START_SUB, END_SUB),
- Bounds( 15.0, 30.0, 15.0, 25.0,START_SUB, END_SUB),
- Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
- Bounds(-10.9, 10.0, 5.0, 7.3,  START_SUB, END_SUB),
- Bounds(33.9, 40.0,  6.9, 15.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,  0.0, 10.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,-10.0,  0.0, START_SUB, END_SUB),
- Bounds(30.0, 40.0,-15.0,  0.0, START_SUB, END_SUB),
- Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
-
-#for plotting the subregions
-plotter.draw_subregions(list_of_regions, new_lats, new_lons, OUTPUT_PLOT, fmt='png')
-
-                               
-
+    Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB),
+    Bounds(0.0, 10.0,  29.0, 37.5, START_SUB, END_SUB),
+    Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
+    Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
+    Bounds(-19.3, -10.2, 12.0, 20.0, START_SUB, END_SUB),
+    Bounds(15.0, 30.0, 15.0, 25.0, START_SUB, END_SUB),
+    Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
+    Bounds(-10.9, 10.0, 5.0, 7.3,  START_SUB, END_SUB),
+    Bounds(33.9, 40.0,  6.9, 15.0, START_SUB, END_SUB),
+    Bounds(10.0, 25.0,  0.0, 10.0, START_SUB, END_SUB),
+    Bounds(10.0, 25.0, -10.0,  0.0, START_SUB, END_SUB),
+    Bounds(30.0, 40.0, -15.0,  0.0, START_SUB, END_SUB),
+    Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
+
+# for plotting the subregions
+plotter.draw_subregions(list_of_regions, new_lats,
+                        new_lons, OUTPUT_PLOT, fmt='png')

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/subset_TRMM_data_for_NCA_regions.py
----------------------------------------------------------------------
diff --git a/examples/subset_TRMM_data_for_NCA_regions.py b/examples/subset_TRMM_data_for_NCA_regions.py
index bc946a2..90b752b 100644
--- a/examples/subset_TRMM_data_for_NCA_regions.py
+++ b/examples/subset_TRMM_data_for_NCA_regions.py
@@ -15,7 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-#Apache OCW lib immports
+# Apache OCW lib immports
 import ocw.dataset_processor as dsp
 import ocw.utils as utils
 from ocw.dataset import Bounds
@@ -28,7 +28,7 @@ import numpy.ma as ma
 import ssl
 
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 # rectangular boundary
 min_lat = 15.75
@@ -36,21 +36,25 @@ max_lat = 55.75
 min_lon = -125.75
 max_lon = -66.75
 
-start_time = datetime(1998,1,1)
-end_time = datetime(1998,12,31)
+start_time = datetime(1998, 1, 1)
+end_time = datetime(1998, 12, 31)
 
 TRMM_dataset = rcmed.parameter_dataset(3, 36, min_lat, max_lat, min_lon, max_lon,
-                                            start_time, end_time)
+                                       start_time, end_time)
 
-Cuba_and_Bahamas_bounds = Bounds(boundary_type='countries', countries=['Cuba','Bahamas'])
-TRMM_dataset2 = dsp.subset(TRMM_dataset, Cuba_and_Bahamas_bounds, extract=False) # to mask out the data over Mexico and Canada
-
-plotter.draw_contour_map(ma.mean(TRMM_dataset2.values, axis=0), TRMM_dataset2.lats, TRMM_dataset2.lons, fname='TRMM_without_Cuba_and_Bahamas')
-
-NCA_SW_bounds = Bounds(boundary_type='us_states', us_states=['CA','NV','UT','AZ','NM','CO'])
-TRMM_dataset3 = dsp.subset(TRMM_dataset2, NCA_SW_bounds, extract=True) # to mask out the data over Mexico and Canada
-
-plotter.draw_contour_map(ma.mean(TRMM_dataset3.values, axis=0), TRMM_dataset3.lats, TRMM_dataset3.lons, fname='TRMM_NCA_SW')
+Cuba_and_Bahamas_bounds = Bounds(
+    boundary_type='countries', countries=['Cuba', 'Bahamas'])
+# to mask out the data over Mexico and Canada
+TRMM_dataset2 = dsp.subset(
+    TRMM_dataset, Cuba_and_Bahamas_bounds, extract=False)
 
+plotter.draw_contour_map(ma.mean(TRMM_dataset2.values, axis=0), TRMM_dataset2.lats,
+                         TRMM_dataset2.lons, fname='TRMM_without_Cuba_and_Bahamas')
 
+NCA_SW_bounds = Bounds(boundary_type='us_states', us_states=[
+                       'CA', 'NV', 'UT', 'AZ', 'NM', 'CO'])
+# to extract only the data over the NCA Southwest states
+TRMM_dataset3 = dsp.subset(TRMM_dataset2, NCA_SW_bounds, extract=True)
 
+plotter.draw_contour_map(ma.mean(TRMM_dataset3.values, axis=0),
+                         TRMM_dataset3.lats, TRMM_dataset3.lons, fname='TRMM_NCA_SW')
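
A note on the extract flag used twice above: judging from the output filenames ('TRMM_without_Cuba_and_Bahamas' vs. 'TRMM_NCA_SW'), dsp.subset with extract=False masks out the data inside the boundary, while extract=True keeps only the data inside it. A short sketch under that reading, reusing the example's TRMM_dataset:

    # Sketch of the two subset modes, assuming extract=True keeps the
    # region inside the bounds and extract=False masks it out.
    sw_states = Bounds(boundary_type='us_states',
                       us_states=['CA', 'NV', 'UT', 'AZ', 'NM', 'CO'])

    inside_only = dsp.subset(TRMM_dataset, sw_states, extract=True)
    without_region = dsp.subset(TRMM_dataset, sw_states, extract=False)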

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/taylor_diagram_example.py
----------------------------------------------------------------------
diff --git a/examples/taylor_diagram_example.py b/examples/taylor_diagram_example.py
index bae85be..86236c8 100644
--- a/examples/taylor_diagram_example.py
+++ b/examples/taylor_diagram_example.py
@@ -34,7 +34,7 @@ FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
 FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
 
 # Download some example NetCDF files for the evaluation
-################################################################################
+##########################################################################
 if not path.exists(FILE_1):
     urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
 
@@ -44,7 +44,7 @@ if not path.exists(FILE_2):
 # Load the example datasets into OCW Dataset objects. We want to load
 # the 'tasmax' variable values. We'll also name the datasets for use
 # when plotting.
-################################################################################
+##########################################################################
 knmi_dataset = local.load_file(FILE_1, "tasmax")
 wrf_dataset = local.load_file(FILE_2, "tasmax")
 
@@ -54,24 +54,25 @@ wrf_dataset.name = "wrf"
 # Date values from loaded datasets might not always fall on reasonable days.
 # With monthly data, we could have data falling on the 1st, 15th, or some other
 # day of the month. Let's fix that real quick.
-################################################################################
+##########################################################################
 knmi_dataset = dsp.normalize_dataset_datetimes(knmi_dataset, 'monthly')
 wrf_dataset = dsp.normalize_dataset_datetimes(wrf_dataset, 'monthly')
 
 # We're only going to run this evaluation over a year's worth of data. We'll
 # make a Bounds object and use it to subset our datasets.
-################################################################################
-subset = Bounds(lat_min=-45, lat_max=42, lon_min=-24, lon_max=60, start=datetime.datetime(1989, 1, 1), end=datetime.datetime(1989, 12, 1))
+##########################################################################
+subset = Bounds(lat_min=-45, lat_max=42, lon_min=-24, lon_max=60,
+                start=datetime.datetime(1989, 1, 1), end=datetime.datetime(1989, 12, 1))
 knmi_dataset = dsp.subset(knmi_dataset, subset)
 wrf_dataset = dsp.subset(wrf_dataset, subset)
 
 # Temporally re-bin the data into a monthly timestep.
-################################################################################
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution = 'monthly')
-wrf_dataset = dsp.temporal_rebin(wrf_dataset, temporal_resolution = 'monthly')
+##########################################################################
+knmi_dataset = dsp.temporal_rebin(knmi_dataset, temporal_resolution='monthly')
+wrf_dataset = dsp.temporal_rebin(wrf_dataset, temporal_resolution='monthly')
 
 # Spatially regrid the datasets onto a 1 degree grid.
-################################################################################
+##########################################################################
 # Get the bounds of the reference dataset and use it to create a new
 # set of lat/lon values on a 1 degree step
 # Using the bounds we will create a new set of lats and lons on 1 degree step
@@ -84,23 +85,24 @@ knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
 wrf_dataset = dsp.spatial_regrid(wrf_dataset, new_lats, new_lons)
 
 # Load the metrics that we want to use for the evaluation.
-################################################################################
+##########################################################################
 taylor_diagram = metrics.SpatialPatternTaylorDiagram()
 
 # Create our new evaluation object. The knmi dataset is the evaluation's
 # reference dataset. We then provide a list of 1 or more target datasets
 # to use for the evaluation. In this case, we only want to use the wrf dataset.
 # Then we pass a list of all the metrics that we want to use in the evaluation.
-################################################################################
-test_evaluation = evaluation.Evaluation(knmi_dataset, [wrf_dataset], [taylor_diagram])
+##########################################################################
+test_evaluation = evaluation.Evaluation(
+    knmi_dataset, [wrf_dataset], [taylor_diagram])
 test_evaluation.run()
 
 # Pull out the evaluation results and prepare them for drawing a Taylor diagram.
-################################################################################
-taylor_data = test_evaluation.results[0]                                                 
+##########################################################################
+taylor_data = test_evaluation.results[0]
 
 # Draw our taylor diagram!
-################################################################################
+##########################################################################
 plotter.draw_taylor_diagram(taylor_data,
                             [wrf_dataset.name],
                             knmi_dataset.name,
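
Taken together, the example follows the usual OCW flow. A condensed sketch, using only calls that appear in the example itself (the 'subset' Bounds object and the 1-degree new_lats/new_lons grid are the ones built above):

    # Condensed sketch of the flow above: load -> normalize -> subset ->
    # rebin -> regrid -> evaluate. Every call appears in the example.
    ref = local.load_file(FILE_1, "tasmax")
    ref = dsp.normalize_dataset_datetimes(ref, 'monthly')
    ref = dsp.subset(ref, subset)                      # Bounds defined above
    ref = dsp.temporal_rebin(ref, temporal_resolution='monthly')
    ref = dsp.spatial_regrid(ref, new_lats, new_lons)  # 1-degree grid above
    # ... wrf_dataset goes through the same five steps ...

    ev = evaluation.Evaluation(ref, [wrf_dataset],
                               [metrics.SpatialPatternTaylorDiagram()])
    ev.run()
    taylor_data = ev.results[0]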

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/examples/time_series_with_regions.py
----------------------------------------------------------------------
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
index 15e2ee2..05c4721 100644
--- a/examples/time_series_with_regions.py
+++ b/examples/time_series_with_regions.py
@@ -1,4 +1,4 @@
-#Apache OCW lib immports
+# Apache OCW lib imports
 from ocw.dataset import Dataset, Bounds
 import ocw.data_source.local as local
 import ocw.data_source.rcmed as rcmed
@@ -15,39 +15,40 @@ from os import path
 import urllib
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
+    ssl._create_default_https_context = ssl._create_unverified_context
 
 
 # File URL leader
 FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
+# Three Local Model Files
 FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
 FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
 
-LAT_MIN = -45.0 
-LAT_MAX = 42.24 
+LAT_MIN = -45.0
+LAT_MAX = 42.24
 LON_MIN = -24.0
-LON_MAX = 60.0 
+LON_MAX = 60.0
 START = datetime.datetime(2000, 01, 1)
 END = datetime.datetime(2007, 12, 31)
 
-EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX, lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
+EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX,
+                     lon_min=LON_MIN, lon_max=LON_MAX, start=START, end=END)
 
-varName = 'pr' 
-gridLonStep=0.44
-gridLatStep=0.44
+varName = 'pr'
+gridLonStep = 0.44
+gridLatStep = 0.44
 
-#needed vars for the script
-target_datasets =[]
-tSeries =[]
-results =[]
-labels =[] # could just as easily b the names for each subregion
+# needed vars for the script
+target_datasets = []
+tSeries = []
+results = []
+labels = []  # could just as easily be the names for each subregion
 region_counter = 0
 
 # Download necessary NetCDF file if not present
 if not path.exists(FILE_1):
-	urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
 
 if not path.exists(FILE_2):
     urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
@@ -63,9 +64,10 @@ target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
 
 """ Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
 print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+# the dataset_id and the parameter id were determined from
+# https://rcmes.jpl.nasa.gov/content/data-rcmes-database
+CRU31 = rcmed.parameter_dataset(
+    10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
 
 """ Step 3: Processing datasets so they are the same shape ... """
@@ -74,10 +76,12 @@ CRU31 = dsp.water_flux_unit_conversion(CRU31)
 CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
-	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')  		
-	
+    target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS)
+    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[
+                                                             member])
+    target_datasets[member] = dsp.normalize_dataset_datetimes(
+        target_datasets[member], 'monthly')
+
 print("... spatial regridding")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
@@ -85,63 +89,63 @@ CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
 
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
+    target_datasets[member] = dsp.spatial_regrid(
+        target_datasets[member], new_lats, new_lons)
 
-#find climatology monthly for obs and models
+# find climatology monthly for obs and models
 CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31)
 
 for member, each_target_dataset in enumerate(target_datasets):
-	target_datasets[member].values, target_datasets[member].times = utils.calc_climatology_monthly(target_datasets[member])
-		
-#make the model ensemble
+    target_datasets[member].values, target_datasets[
+        member].times = utils.calc_climatology_monthly(target_datasets[member])
+
+# make the model ensemble
 target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
+target_datasets_ensemble.name = "ENS"
 
-#append to the target_datasets for final analysis
+# append to the target_datasets for final analysis
 target_datasets.append(target_datasets_ensemble)
 
 """ Step 4: Subregion stuff """
 list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5), 
- Bounds(0.0, 10.0,  29.0, 37.5), 
- Bounds(10.0, 20.0, 25.0, 32.5),
- Bounds(20.0, 33.0, 25.0, 32.5), 
- Bounds(-19.3,-10.2,12.0, 20.0), 
- Bounds( 15.0, 30.0, 15.0, 25.0),
- Bounds(-10.0, 10.0, 7.3, 15.0), 
- Bounds(-10.9, 10.0, 5.0, 7.3),  
- Bounds(33.9, 40.0,  6.9, 15.0),
- Bounds(10.0, 25.0,  0.0, 10.0), 
- Bounds(10.0, 25.0,-10.0,  0.0), 
- Bounds(30.0, 40.0,-15.0,  0.0), 
- Bounds(33.0, 40.0, 25.0, 35.0)]
-
-region_list=[["R"+str(i+1)] for i in xrange(13)]
+    Bounds(-10.0, 0.0, 29.0, 36.5),
+    Bounds(0.0, 10.0,  29.0, 37.5),
+    Bounds(10.0, 20.0, 25.0, 32.5),
+    Bounds(20.0, 33.0, 25.0, 32.5),
+    Bounds(-19.3, -10.2, 12.0, 20.0),
+    Bounds(15.0, 30.0, 15.0, 25.0),
+    Bounds(-10.0, 10.0, 7.3, 15.0),
+    Bounds(-10.9, 10.0, 5.0, 7.3),
+    Bounds(33.9, 40.0,  6.9, 15.0),
+    Bounds(10.0, 25.0,  0.0, 10.0),
+    Bounds(10.0, 25.0, -10.0,  0.0),
+    Bounds(30.0, 40.0, -15.0,  0.0),
+    Bounds(33.0, 40.0, 25.0, 35.0)]
+
+region_list = [["R" + str(i + 1)] for i in xrange(13)]
 
 for regions in region_list:
-	firstTime = True
-	subset_name = regions[0]+"_CRU31"
-	#labels.append(subset_name) #for legend, uncomment this line
-	subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name)
-	tSeries = utils.calc_time_series(subset)
-	results.append(tSeries)
-	tSeries=[]
-	firstTime = False
-	for member, each_target_dataset in enumerate(target_datasets):
-		subset_name = regions[0]+"_"+target_datasets[member].name
-		#labels.append(subset_name) #for legend, uncomment this line
-		subset = dsp.subset(target_datasets[member],
-							list_of_regions[region_counter],
-							subset_name)
-		tSeries = utils.calc_time_series(subset)
-		results.append(tSeries)
-		tSeries=[]
-	
-	plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0], ptitle=regions[0],fmt='png')
-	results =[]
-	tSeries =[]
-	labels =[]
-	region_counter+=1
-			
-                               
-
+    firstTime = True
+    subset_name = regions[0] + "_CRU31"
+    # labels.append(subset_name) #for legend, uncomment this line
+    subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name)
+    tSeries = utils.calc_time_series(subset)
+    results.append(tSeries)
+    tSeries = []
+    firstTime = False
+    for member, each_target_dataset in enumerate(target_datasets):
+        subset_name = regions[0] + "_" + target_datasets[member].name
+        # labels.append(subset_name) #for legend, uncomment this line
+        subset = dsp.subset(target_datasets[member],
+                            list_of_regions[region_counter],
+                            subset_name)
+        tSeries = utils.calc_time_series(subset)
+        results.append(tSeries)
+        tSeries = []
+
+    plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[
+                             0], ptitle=regions[0], fmt='png')
+    results = []
+    tSeries = []
+    labels = []
+    region_counter += 1
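
One design note on the loop above: the external region_counter and the per-iteration list resets can be dropped by pairing each region name with its bounds directly. A sketch using only calls from this example:

    # Sketch: the same per-region time series, with names and bounds
    # zipped together instead of tracked through region_counter.
    for (name,), region in zip(region_list, list_of_regions):
        series = [utils.calc_time_series(
            dsp.subset(CRU31, region, name + "_CRU31"))]
        for target in target_datasets:
            series.append(utils.calc_time_series(
                dsp.subset(target, region, name + "_" + target.name)))
        plotter.draw_time_series(np.array(series), CRU31.times, [],
                                 name, ptitle=name, fmt='png')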

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/ez_setup.py
----------------------------------------------------------------------
diff --git a/ez_setup.py b/ez_setup.py
index d7bfe97..e9113c6 100644
--- a/ez_setup.py
+++ b/ez_setup.py
@@ -51,6 +51,7 @@ except ImportError:
 DEFAULT_VERSION = "8.2.1"
 DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
 
+
 def _python_cmd(*args):
     """
     Return True if the command succeeded.
@@ -142,7 +143,7 @@ def _do_download(version, download_base, to_dir, download_delay):
 
 
 def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, download_delay=15):
+                   to_dir=os.curdir, download_delay=15):
     to_dir = os.path.abspath(to_dir)
     rep_modules = 'pkg_resources', 'setuptools'
     imported = set(sys.modules).intersection(rep_modules)
@@ -171,6 +172,7 @@ def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         del pkg_resources, sys.modules['pkg_resources']
         return _do_download(version, download_base, to_dir, download_delay)
 
+
 def _clean_check(cmd, target):
     """
     Run the command to download target. If the command fails, clean up before
@@ -183,6 +185,7 @@ def _clean_check(cmd, target):
             os.unlink(target)
         raise
 
+
 def download_file_powershell(url, target):
     """
     Download the file at url to target using Powershell (which will validate
@@ -202,6 +205,7 @@ def download_file_powershell(url, target):
     ]
     _clean_check(cmd, target)
 
+
 def has_powershell():
     if platform.system() != 'Windows':
         return False
@@ -215,10 +219,12 @@ def has_powershell():
 
 download_file_powershell.viable = has_powershell
 
+
 def download_file_curl(url, target):
     cmd = ['curl', url, '--silent', '--output', target]
     _clean_check(cmd, target)
 
+
 def has_curl():
     cmd = ['curl', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -230,10 +236,12 @@ def has_curl():
 
 download_file_curl.viable = has_curl
 
+
 def download_file_wget(url, target):
     cmd = ['wget', url, '--quiet', '--output-document', target]
     _clean_check(cmd, target)
 
+
 def has_wget():
     cmd = ['wget', '--version']
     with open(os.path.devnull, 'wb') as devnull:
@@ -245,6 +253,7 @@ def has_wget():
 
 download_file_wget.viable = has_wget
 
+
 def download_file_insecure(url, target):
     """
     Use Python to download the file, even though it cannot authenticate the
@@ -263,6 +272,7 @@ def download_file_insecure(url, target):
 
 download_file_insecure.viable = lambda: True
 
+
 def get_best_downloader():
     downloaders = (
         download_file_powershell,
@@ -273,8 +283,9 @@ def get_best_downloader():
     viable_downloaders = (dl for dl in downloaders if dl.viable())
     return next(viable_downloaders, None)
 
+
 def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
+                        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
     """
     Download setuptools from a specified location and return its filename
     `version` should be a valid setuptools version number that is available
@@ -296,12 +307,14 @@ def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
         downloader(url, saveto)
     return os.path.realpath(saveto)
 
+
 def _build_install_args(options):
     """
     Build the arguments to 'python setup.py install' on the setuptools package
     """
     return ['--user'] if options.user_install else []
 
+
 def _parse_args():
     """
     Parse the command line for options
@@ -327,6 +340,7 @@ def _parse_args():
     # positional arguments are ignored
     return options
 
+
 def main():
     """Install or upgrade setuptools and EasyInstall"""
     options = _parse_args()
@@ -338,4 +352,4 @@ def main():
     return _install(archive, _build_install_args(options))
 
 if __name__ == '__main__':
-    sys.exit(main())
\ No newline at end of file
+    sys.exit(main())
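
ez_setup's downloader selection is worth calling out as a pattern: each download function carries a .viable attribute pointing at its own availability check, and get_best_downloader() returns the first strategy that reports viable. A stripped-down sketch of the same pattern (the function names here are illustrative, not part of ez_setup):

    # Minimal sketch of the function-attribute capability pattern used
    # by ez_setup: each strategy advertises its own availability check.
    import os
    import subprocess

    def fetch_with_curl(url, target):
        subprocess.check_call(['curl', url, '--silent', '--output', target])

    def has_curl():
        # availability check: does `curl --version` run at all?
        try:
            with open(os.path.devnull, 'wb') as devnull:
                subprocess.check_call(['curl', '--version'],
                                      stdout=devnull, stderr=devnull)
        except Exception:
            return False
        return True

    fetch_with_curl.viable = has_curl

    def fetch_insecure(url, target):
        # last-resort strategy, no certificate validation (Python 2
        # form, matching this codebase)
        import urllib
        urllib.urlretrieve(url, target)

    fetch_insecure.viable = lambda: True

    def best_fetcher(strategies=(fetch_with_curl, fetch_insecure)):
        # first strategy whose viability check passes, else None
        return next((s for s in strategies if s.viable()), None)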

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/mccsearch/code/mainProg.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProg.py b/mccsearch/code/mainProg.py
index 71d6278..fc3a752 100644
--- a/mccsearch/code/mainProg.py
+++ b/mccsearch/code/mainProg.py
@@ -27,66 +27,65 @@ import subprocess
 def main():
     CEGraph = nx.DiGraph()
     prunedGraph = nx.DiGraph()
-    MCCList =[]
-    MCSList=[]
-    MCSMCCNodesList =[]
-    allMCSsList =[]
-    allCETRMMList =[]
+    MCCList = []
+    MCSList = []
+    MCSMCCNodesList = []
+    allMCSsList = []
+    allCETRMMList = []
 
-    #for GrADs
+    # for GrADs
     subprocess.call('export DISPLAY=:0.0', shell=True)
 
-    mainDirStr= "/directory/to/where/to/store/outputs"
-    TRMMdirName = "/directory/to/the/TRMM/netCDF/files" 
+    mainDirStr = "/directory/to/where/to/store/outputs"
+    TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
     CEoriDirName = "/directory/to/the/MERG/netCDF/files"
-   
-    #for first time working with the raw MERG zipped files 
+
+    # for first time working with the raw MERG zipped files
     # mccSearch.preprocessingMERG("/directory/to/where/the/raw/MERG/files/are")
     # ---------------------------------------------------------------------------------
 
-
-    #create main directory and file structure for storing intel
+    # create main directory and file structure for storing intel
     mccSearch.createMainDirectory(mainDirStr)
-    TRMMCEdirName = mainDirStr+'/TRMMnetcdfCEs'
-    CEdirName = mainDirStr+'/MERGnetcdfCEs'
+    TRMMCEdirName = mainDirStr + '/TRMMnetcdfCEs'
+    CEdirName = mainDirStr + '/MERGnetcdfCEs'
 
     # for doing some postprocessing with the clipped datasets instead of running the full program, e.g.
     # mccSearch.postProcessingNetCDF(3,CEoriDirName)
     # mccSearch.postProcessingNetCDF(2)
     # -------------------------------------------------------------------------------------------------
 
-    #let's go!
+    # let's go!
     print "\n -------------- Read MERG Data ----------"
     mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
-    print ("-"*80)
+    print ("-" * 80)
 
     print 'in main', len(mergImgs)
-    #print 'timeList', timeList
+    # print 'timeList', timeList
     print 'TRMMdirName ', TRMMdirName
     print "\n -------------- TESTING findCloudElements ----------"
-    CEGraph = mccSearch.findCloudElements(mergImgs,timeList,TRMMdirName)
-    #if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
+    CEGraph = mccSearch.findCloudElements(mergImgs, timeList, TRMMdirName)
+    # if the TRMMdirName wasn't entered for whatever reason, you can still get the TRMM data this way
     # CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
     # allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
     # ----------------------------------------------------------------------------------------------
-    print ("-"*80)
+    print ("-" * 80)
     print "number of nodes in CEGraph is: ", CEGraph.number_of_nodes()
-    print ("-"*80)    
+    print ("-" * 80)
     print "\n -------------- TESTING findCloudClusters ----------"
     prunedGraph = mccSearch.findCloudClusters(CEGraph)
-    print ("-"*80)
+    print ("-" * 80)
     print "number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes()
-    print ("-"*80)
+    print ("-" * 80)
     print "\n -------------- TESTING findMCCs ----------"
-    MCCList,MCSList = mccSearch.findMCC(prunedGraph)
-    print ("-"*80)
+    MCCList, MCSList = mccSearch.findMCC(prunedGraph)
+    print ("-" * 80)
     print "MCC List has been acquired ", len(MCCList)
     print "MCS List has been acquired ", len(MCSList)
-    print ("-"*80)
-    #now ready to perform various calculations/metrics
+    print ("-" * 80)
+    # now ready to perform various calculations/metrics
     print "\n -------------- TESTING METRICS ----------"
 
-    #some calculations/metrics that work that work
+    # some calculations/metrics that work
     # print "creating the MCC userfile ", mccSearch.createTextFile(MCCList,1)
     # print "creating the MCS userfile ", mccSearch.createTextFile(MCSList,2)
     # MCCTimes, tdelta = mccSearch.temporalAndAreaInfoMetric(MCCList)
@@ -95,9 +94,9 @@ def main():
     # print "shortest duration is: ", mccSearch.shortestDuration(MCCTimes), "hrs"
     # #print "Average duration is: ", mccSearch.convert_timedelta(mccSearch.averageMCCLength(MCCTimes))
     # print "Average duration is: ", mccSearch.averageDuration(MCCTimes), "hrs"
-    # print "Average size is: ", mccSearch.averageFeatureSize(MCCList), "km^2" 
-    
-    #some plots that work
+    # print "Average size is: ", mccSearch.averageFeatureSize(MCCList), "km^2"
+
+    # some plots that work
     # mccSearch.plotAccTRMM(MCCList)
     mccSearch.displayPrecip(MCCList)
     # mccSearch.plotAccuInTimeRange('2009-09-01_00:00:00', '2009-09-01_09:00:00')
@@ -105,6 +104,6 @@ def main():
     # mccSearch.displayPrecip(MCCList)
     # mccSearch.plotHistogram(MCCList)
     #
-    print ("-"*80)
-    
-main()
\ No newline at end of file
+    print ("-" * 80)
+
+main()
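
One portability caveat: mainProg.py is Python 2 throughout (print statements, xrange elsewhere in the codebase). If it ever needs to run under Python 3 as well, the standard bridge is the __future__ import; a sketch against the prints above:

    # Sketch: the same output with prints valid under Python 2 and 3.
    from __future__ import print_function

    print("\n -------------- Read MERG Data ----------")
    print("-" * 80)
    print("number of nodes in CEGraph is: ", CEGraph.number_of_nodes())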

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/mccsearch/code/mainProgTemplate.py
----------------------------------------------------------------------
diff --git a/mccsearch/code/mainProgTemplate.py b/mccsearch/code/mainProgTemplate.py
index d43788e..8ef0d0b 100644
--- a/mccsearch/code/mainProgTemplate.py
+++ b/mccsearch/code/mainProgTemplate.py
@@ -31,67 +31,66 @@ import subprocess
 def main():
     CEGraph = nx.DiGraph()
     prunedGraph = nx.DiGraph()
-    MCCList =[]
-    MCSList=[]
-    MCSMCCNodesList =[]
-    allMCSsList =[]
-    allCETRMMList =[]
+    MCCList = []
+    MCSList = []
+    MCSMCCNodesList = []
+    allMCSsList = []
+    allCETRMMList = []
 
-    #for GrADs
+    # for GrADs
     subprocess.call('export DISPLAY=:0.0', shell=True)
 
-    mainDirStr= "/directory/to/where/to/store/outputs"
-    TRMMdirName = "/directory/to/the/TRMM/netCDF/files" 
+    mainDirStr = "/directory/to/where/to/store/outputs"
+    TRMMdirName = "/directory/to/the/TRMM/netCDF/files"
     CEoriDirName = "/directory/to/the/MERG/netCDF/files"
 
-    #for first time working with the raw MERG zipped files 
+    # for first time working with the raw MERG zipped files
     # mccSearch.preprocessingMERG("/Users/kimwhitehall/Documents/HU/research/DATA")
     # ---------------------------------------------------------------------------------
 
-
-    #create main directory and file structure for storing intel
+    # create main directory and file structure for storing intel
     mccSearch.createMainDirectory(mainDirStr)
-    TRMMCEdirName = mainDirStr+'/TRMMnetcdfCEs'
-    CEdirName = mainDirStr+'/MERGnetcdfCEs'
+    TRMMCEdirName = mainDirStr + '/TRMMnetcdfCEs'
+    CEdirName = mainDirStr + '/MERGnetcdfCEs'
 
     # for doing some postprocessing with the clipped datasets instead of running the full program, e.g.
     # mccSearch.postProcessingNetCDF(3,CEoriDirName)
     # mccSearch.postProcessingNetCDF(2)
     # -------------------------------------------------------------------------------------------------
 
-    #let's go!
+    # let's go!
     print "\n -------------- Read MERG Data ----------"
     mergImgs, timeList = mccSearch.readMergData(CEoriDirName)
-    print ("-"*80)
+    print ("-" * 80)
 
     print 'in main', len(mergImgs)
-    #print 'timeList', timeList
+    # print 'timeList', timeList
     print 'TRMMdirName ', TRMMdirName
     print "\n -------------- TESTING findCloudElements ----------"
-    CEGraph = mccSearch.findCloudElements(mergImgs,timeList,TRMMdirName)
-    #if the TRMMdirName wasnt entered for whatever reason, you can still get the TRMM data this way
+    CEGraph = mccSearch.findCloudElements(mergImgs, timeList, TRMMdirName)
+    # if the TRMMdirName wasn't entered for whatever reason, you can still get the TRMM data this way
     # CEGraph = mccSearch.findCloudElements(mergImgs,timeList)
     # allCETRMMList=mccSearch.findPrecipRate(TRMMdirName,timeList)
     # ----------------------------------------------------------------------------------------------
-    print ("-"*80)
+    print ("-" * 80)
     print "number of nodes in CEGraph is: ", CEGraph.number_of_nodes()
-    print ("-"*80)    
+    print ("-" * 80)
     print "\n -------------- TESTING findCloudClusters ----------"
     prunedGraph = mccSearch.findCloudClusters(CEGraph)
-    print ("-"*80)
+    print ("-" * 80)
     print "number of nodes in prunedGraph is: ", prunedGraph.number_of_nodes()
-    print ("-"*80)
-    #sys.exit()
+    print ("-" * 80)
+    # sys.exit()
     print "\n -------------- TESTING findMCCs ----------"
-    MCCList,MCSList = mccSearch.findMCC(prunedGraph)
-    print ("-"*80)
+    MCCList, MCSList = mccSearch.findMCC(prunedGraph)
+    print ("-" * 80)
     print "MCC List has been acquired ", len(MCCList)
     print "MCS List has been acquired ", len(MCSList)
-    print ("-"*80)
-    #now ready to perform various calculations/metrics
+    print ("-" * 80)
+    # now ready to perform various calculations/metrics
     print "\n -------------- TESTING METRICS ----------"
 
-    #some calculations/metrics that work that work
+    # some calculations/metrics that work
     # print "creating the MCC userfile ", mccSearch.createTextFile(MCCList,1)
     # print "creating the MCS userfile ", mccSearch.createTextFile(MCSList,2)
     # MCCTimes, tdelta = mccSearch.temporalAndAreaInfoMetric(MCCList)
@@ -100,9 +99,9 @@ def main():
     # print "shortest duration is: ", mccSearch.shortestDuration(MCCTimes), "hrs"
     # #print "Average duration is: ", mccSearch.convert_timedelta(mccSearch.averageMCCLength(MCCTimes))
     # print "Average duration is: ", mccSearch.averageDuration(MCCTimes), "hrs"
-    # print "Average size is: ", mccSearch.averageFeatureSize(MCCList), "km^2" 
-    
-    #some plots that work
+    # print "Average size is: ", mccSearch.averageFeatureSize(MCCList), "km^2"
+
+    # some plots that work
     # mccSearch.plotAccTRMM(MCCList)
     # mccSearch.displayPrecip(MCCList)
     # mccSearch.plotAccuInTimeRange('2009-09-01_00:00:00', '2009-09-01_09:00:00')
@@ -110,6 +109,6 @@ def main():
     # mccSearch.displayPrecip(MCCList)
     # mccSearch.plotHistogram(MCCList)
     #
-    print ("-"*80)
-    
-main()
\ No newline at end of file
+    print ("-" * 80)
+
+main()

http://git-wip-us.apache.org/repos/asf/climate/blob/731419f8/ocw/data_source/esgf.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/esgf.py b/ocw/data_source/esgf.py
index ac802c0..67c307f 100644
--- a/ocw/data_source/esgf.py
+++ b/ocw/data_source/esgf.py
@@ -29,6 +29,7 @@ import ocw.data_source.local as local
 from bs4 import BeautifulSoup
 import requests
 
+
 def load_dataset(dataset_id,
                  variable_name,
                  esgf_username,
@@ -104,6 +105,7 @@ def load_dataset(dataset_id,
 
     return datasets
 
+
 def _get_file_download_data(dataset_id, variable, url=DEFAULT_ESGF_SEARCH):
     ''''''
     url += '?type=File&dataset_id={}&variable={}'
@@ -130,6 +132,7 @@ def _get_file_download_data(dataset_id, variable, url=DEFAULT_ESGF_SEARCH):
 
     return zip(urls, variables)
 
+
 def _download_files(file_urls, username, password, download_directory='/tmp'):
     ''''''
     try:
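
For context, the public entry point this module exposes is load_dataset, whose first four parameters are visible in the hunk above. A sketch of a call; the dataset id and variable below are placeholders, not real ESGF values:

    # Sketch: calling the loader with placeholder arguments. Per the
    # code above, load_dataset returns a list of datasets (one per
    # downloaded file).
    import ocw.data_source.esgf as esgf

    datasets = esgf.load_dataset('some.esgf.dataset.id',  # hypothetical
                                 'tas',                   # hypothetical
                                 'my_esgf_username',
                                 'my_esgf_password')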

