Posted to commits@climate.apache.org by hu...@apache.org on 2015/09/25 18:33:20 UTC

[2/3] climate git commit: CLIMATE-676 - Cleaning up the examples

CLIMATE-676 - Cleaning up the examples

- Following up on CLIMATE-666, some example files are renamed and others are moved into the new examples/configuration_file_examples folder.
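
For context: each configuration file below is consumed by the relocated
run_RCMES.py driver, which takes the configuration path as its first
command-line argument (python run_RCMES.py <config>.yaml). A minimal sketch of
that entry point, mirroring the argument handling in run_RCMES.py further down
(the example file name is illustrative only):

    # Sketch only -- mirrors how run_RCMES.py (below) reads its configuration.
    # Invoked as: python run_RCMES.py cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
    import sys
    import yaml

    config = yaml.load(open(sys.argv[1]))  # PyYAML parses the ISO dates into date objects
    time_info = config['time']             # section names match the YAML files below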


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/fd310f40
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/fd310f40
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/fd310f40

Branch: refs/heads/master
Commit: fd310f400d3ce1027925205f82fcf99691e39f9e
Parents: e78ac8e
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Fri Sep 25 09:23:29 2015 -0700
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Fri Sep 25 09:23:29 2015 -0700

----------------------------------------------------------------------
 ...ia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml |  45 ----
 ...ia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml |  45 ++++
 ...ordex-AF_tasmax_annual_mean_bias_to_cru.yaml |  46 ++++
 ...prec_subregion_annual_cycle_time_series.yaml |  90 ++++++++
 .../metrics_and_plots.py                        | 219 +++++++++++++++++++
 ...cap_prec_JJA_mean_taylor_diagram_to_cru.yaml |  44 ++++
 ...nterannual_variability_portrait_diagram.yaml |  75 +++++++
 .../configuration_file_examples/run_RCMES.py    | 216 ++++++++++++++++++
 ...ordex-AF_tasmax_annual_mean_bias_to_cru.yaml |  46 ----
 examples/cordex-AF_tasmax_bias_to_cru.yaml      |  46 ----
 ...prec_subregion_annual_cycle_time_series.yaml |  90 --------
 examples/example_package.py                     | 219 -------------------
 ...cap_prec_JJA_mean_taylor_diagram_to_cru.yaml |  44 ----
 .../narccap_prec_JJA_taylor_diagram_to_cru.yaml |  44 ----
 ...nterannual_variability_portrait_diagram.yaml |  75 -------
 .../old_examples/knmi_to_cru31_full_bias.py     | 174 ---------------
 .../old_examples/model_ensemble_to_rcmed.py     | 186 ----------------
 examples/old_examples/multi_model_evaluation.py | 151 -------------
 .../old_examples/multi_model_taylor_diagram.py  | 144 ------------
 .../old_examples/simple_model_to_model_bias.py  | 124 -----------
 .../simple_model_to_model_bias_DJF_and_JJA.py   |  64 ------
 examples/old_examples/simple_model_tstd.py      |  89 --------
 examples/old_examples/subregions.py             |  53 -----
 .../old_examples/subregions_portrait_diagram.py | 139 ------------
 examples/old_examples/taylor_diagram_example.py | 113 ----------
 .../old_examples/time_series_with_regions.py    | 141 ------------
 examples/run_RCMES.py                           | 216 ------------------
 27 files changed, 735 insertions(+), 2203 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
----------------------------------------------------------------------
diff --git a/examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml b/examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
deleted file mode 100644
index 276e744..0000000
--- a/examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-workdir: ./
-output_netcdf_filename: cmip5_SE_Asia_prec_DJF_1998-2010.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True  
-    start_time: 1981-01-01
-    end_time: 2010-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: False  
-
-space:
-    min_lat: -15.14
-    max_lat: 27.26
-    min_lon: 89.26  
-    max_lon: 146.96
-
-regrid:
-    regrid_on_reference: True  
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: TRMM
-        dataset_id: 3
-        parameter_id: 36
-
-    targets:
-        data_source: local
-        path: ./data/pr_Amon*                                   
-        variable: pr    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: cmip5_SE_ASIA_prec_DJF_mean_taylor_diagram_to_TRMM
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml b/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
new file mode 100644
index 0000000..276e744
--- /dev/null
+++ b/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
@@ -0,0 +1,45 @@
+workdir: ./
+output_netcdf_filename: cmip5_SE_Asia_prec_DJF_1998-2010.nc
+
+# RCMES will temporally subset the data between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used for the metrics calculation; if False, the individual monthly values are used.
+time:
+    maximum_overlap_period: True  
+    start_time: 1981-01-01
+    end_time: 2010-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: False  
+
+space:
+    min_lat: -15.14
+    max_lat: 27.26
+    min_lon: 89.26  
+    max_lon: 146.96
+
+regrid:
+    regrid_on_reference: True  
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: TRMM
+        dataset_id: 3
+        parameter_id: 36
+
+    targets:
+        data_source: local
+        path: ./data/pr_Amon*                                   
+        variable: pr    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots1:
+    file_name: cmip5_SE_ASIA_prec_DJF_mean_taylor_diagram_to_TRMM
+
+use_subregions: False
+
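
A worked example of the month_start/month_end comment above: with month_start
12 and month_end 2, the data are restricted to December-February. A sketch
using the dsp.temporal_subset call made by run_RCMES.py further down (the
input path is hypothetical):

    import ocw.data_source.local as local
    import ocw.dataset_processor as dsp

    # Hypothetical monthly precipitation file; any monthly OCW dataset works.
    ds = local.load_file('./data/pr_Amon_sample.nc', 'pr')
    # Keep December-February only. With average_each_year False every DJF month
    # is retained; True would collapse each winter into one seasonal mean.
    ds_djf = dsp.temporal_subset(12, 2, ds, False)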

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml b/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
new file mode 100644
index 0000000..042a9a3
--- /dev/null
+++ b/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
@@ -0,0 +1,46 @@
+workdir: ./
+output_netcdf_filename: cordex-AF_CRU_taxmax_monthly_1990-2007.nc
+
+# RCMES will temporally subset the data between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used for the metrics calculation; if False, the individual monthly values are used.
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: -45.76
+    max_lat: 42.24
+    min_lon: -24.64
+    max_lon: 60.28
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ./data/AFRICA*tasmax.nc                                                    
+        variable: tasmax  
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-AF_tasmax_annual_mean_bias_to_cru
+    subplots_array: !!python/tuple [3,4] 
+
+use_subregions: False
+
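
A note on the !!python/tuple tag above: it resolves only with PyYAML's full
loader (yaml.load, as used in run_RCMES.py further down); yaml.safe_load
rejects python/* tags. A small sketch of what the driver receives:

    import yaml

    # safe_load would raise a ConstructorError on the python/tuple tag.
    plot_info = yaml.load("subplots_array: !!python/tuple [3,4]")
    row, column = plot_info['subplots_array']  # (3, 4), unpacked as in run_RCMES.py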

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml b/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
new file mode 100644
index 0000000..9483cae
--- /dev/null
+++ b/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
@@ -0,0 +1,90 @@
+workdir: ./
+output_netcdf_filename: cordex_AF_prec_monthly_mean_1990-2007.nc
+
+# RCMES will temporally subset the data between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used for the metrics calculation; if False, the individual monthly values are used.
+time:
+    maximum_overlap_period: True 
+    start_time: 1998-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: -45.76
+    max_lat: 42.24
+    min_lon: -24.64
+    max_lon: 60.28
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU  
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ./data/AFRICA*pr.nc                                
+        variable: pr    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Timeseries_plot_subregion_annual_cycle
+
+plots1:
+    file_name: cordex_AF_prec_subregion_annual_cycle_time_series
+    subplots_array: !!python/tuple [7,3]
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01:
+      [29.0, 36.5, -10.0, 0.0]
+    R02:
+      [29, 37.5, 0, 10]
+    R03:
+      [25, 32.5, 10, 20]
+    R04:
+      [25, 32.5, 20, 33]
+    R05:
+      [12, 20.0, -19.3, -10.2]
+    R06:
+      [15, 25.0, 15, 30]
+    R07:
+      [7.3, 15,  -10, 10]
+    R08:
+      [5, 7.3,  -10, 10]
+    R09:
+      [6.9, 15, 33.9, 40]
+    R10:
+      [2.2, 11.8, 44.2, 51.8]
+    R11:
+      [0, 10, 10, 25]
+    R12:
+      [-10, 0, 10, 25]
+    R13:
+      [-15, 0, 30, 40]
+    R14:
+      [-27.9, -21.4, 13.6, 20]
+    R15:
+      [-35, -27.9, 13.6, 20]
+    R16:
+      [-35, -21.4, 20, 35.7]
+    R17:
+      [-25.8, -11.7, 43.2, 50.3]
+    R18:
+      [25, 35.0, 33, 40]
+    R19:
+      [28, 35, 45, 50]
+    R20:
+      [13, 20.0, 43, 50]
+    R21:
+      [20, 27.5, 50, 58]
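
The subregions mapping above is consumed as a list of (name, [south, north,
west, east]) pairs, sorted by region name; a sketch matching the lookup in
run_RCMES.py further down (it assumes the file above sits in the current
directory):

    import operator
    import yaml

    config = yaml.load(open('cordex_AF_prec_subregion_annual_cycle_time_series.yaml'))
    # Sort by region name so R01..R21 come out in order, as run_RCMES.py does.
    subregions = sorted(config['subregions'].items(), key=operator.itemgetter(0))
    name, (south, north, west, east) = subregions[0]  # ('R01', [29.0, 36.5, -10.0, 0.0])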

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/metrics_and_plots.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/metrics_and_plots.py b/examples/configuration_file_examples/metrics_and_plots.py
new file mode 100644
index 0000000..b564da5
--- /dev/null
+++ b/examples/configuration_file_examples/metrics_and_plots.py
@@ -0,0 +1,219 @@
+# Apache OCW lib imports
+import ocw.data_source.local as local
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.evaluation import Evaluation
+import ocw.metrics as metrics
+
+# Python libraries
+import numpy as np
+import numpy.ma as ma
+import matplotlib.pyplot as plt
+from mpl_toolkits.basemap import Basemap 
+from matplotlib import rcParams
+from matplotlib.patches import Polygon
+import string
+
+def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name, row, column):
+    '''Draw maps of the observed multi-year climatology and the biases of models'''
+
+    # calculate climatology of observation data
+    obs_clim = utils.calc_temporal_mean(obs_dataset)
+    # determine the metrics
+    map_of_bias = metrics.TemporalMeanBias()
+
+    # create the Evaluation object
+    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
+                                 model_datasets, # list of target datasets for the evaluation
+                                 [map_of_bias])  # a single bias metric suffices; results[0] is read below
+
+    # run the evaluation (bias calculation)
+    bias_evaluation.run() 
+
+    rcm_bias = bias_evaluation.results[0]
+
+    fig = plt.figure()
+
+    lat_min = obs_dataset.lats.min()
+    lat_max = obs_dataset.lats.max()
+    lon_min = obs_dataset.lons.min()
+    lon_max = obs_dataset.lons.max()
+
+    string_list = list(string.ascii_lowercase) 
+    ax = fig.add_subplot(row,column,1)
+    m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
+
+    x,y = m(lons, lats)
+
+    m.drawcoastlines(linewidth=1)
+    m.drawcountries(linewidth=1)
+    m.drawstates(linewidth=0.5, color='w')
+    cs = m.contourf(x,y,obs_clim,levels = plotter._nice_intervals(obs_dataset.values, 10), extend='both',cmap='PuOr')
+    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
+    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
+    plt.colorbar(cs, cax = cax)
+    clevs = plotter._nice_intervals(rcm_bias, 11)
+    for imodel in np.arange(len(model_datasets)):
+        ax = fig.add_subplot(row, column,2+imodel)
+        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+        m.drawcoastlines(linewidth=1)
+        m.drawcountries(linewidth=1)
+        m.drawstates(linewidth=0.5, color='w')
+        cs = m.contourf(x,y,rcm_bias[imodel,:],levels = clevs, extend='both', cmap='RdBu_r')
+        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
+
+    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
+    plt.colorbar(cs, cax = cax)
+
+    plt.subplots_adjust(hspace=0.01,wspace=0.05)
+
+    plt.show()
+    fig.savefig(file_name,dpi=600,bbox_inches='tight')
+
+def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name):
+
+    # calculate climatological mean fields
+    obs_dataset.values = utils.calc_temporal_mean(obs_dataset)
+    for dataset in model_datasets:
+        dataset.values = utils.calc_temporal_mean(dataset)
+
+    # Metrics (spatial standard deviation and pattern correlation)
+    # determine the metrics
+    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
+
+    # create the Evaluation object
+    taylor_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
+                                 model_datasets, # list of target datasets for the evaluation
+                                 [taylor_diagram])
+
+    # run the evaluation (bias calculation)
+    taylor_evaluation.run() 
+
+    taylor_data = taylor_evaluation.results[0]
+
+    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right',frameon=False)
+
+def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle, 
+                          file_name, row, column, x_tick=['']):
+
+    nmodel, nt, nregion = model_subregion_mean.shape  
+
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+        
+    x_axis = np.arange(nt)
+    x_tick_values = x_axis
+
+    fig = plt.figure()
+    rcParams['xtick.labelsize'] = 6
+    rcParams['ytick.labelsize'] = 6
+  
+    for iregion in np.arange(nregion):
+        ax = fig.add_subplot(row, column, iregion+1) 
+        # Label the x axis only on the bottom row of panels.
+        if iregion+1 > column*(row-1):
+            x_tick_labels = x_tick
+        else:
+            x_tick_labels = ['']
+        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
+        for imodel in np.arange(nmodel):
+            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
+        ax.set_xlim([-0.5,nt-0.5])
+        ax.set_xticks(x_tick_values)
+        ax.set_xticklabels(x_tick_labels)
+        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
+    
+    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center' , prop={'size':7}, frameon=False)  
+
+    fig.subplots_adjust(hspace=0.7, wspace=0.5)
+    plt.show()
+    fig.savefig(file_name, dpi=600, bbox_inches='tight')
+
+def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
+                               file_name, normalize=True):
+
+    nmodel, nt, nregion = model_subregion_mean.shape
+    
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+
+    subregion_metrics = ma.zeros([4, nregion, nmodel])
+
+    for imodel in np.arange(nmodel):
+        for iregion in np.arange(nregion):
+            # First metric: bias
+            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time = True)
+            # Second metric: standard deviation
+            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Third metric: RMSE
+            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Fourth metric: correlation
+            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+   
+    if normalize:
+        for iregion in np.arange(nregion):
+            subregion_metrics[0, iregion, : ] = subregion_metrics[0, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
+            subregion_metrics[1, iregion, : ] = subregion_metrics[1, iregion, : ]*100. 
+            subregion_metrics[2, iregion, : ] = subregion_metrics[2, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
+
+    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
+
+    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
+        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric, 
+                                      xlabel='model',ylabel='region')             
+
+def Map_plot_subregion(subregions, ref_dataset, directory):
+  
+    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
+                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
+    m.drawcoastlines(linewidth=0.75)
+    m.drawcountries(linewidth=0.75)
+    m.etopo()  
+    x, y = m(lons, lats) 
+    #subregion_array = ma.masked_equal(subregion_array, 0)
+    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
+    for subregion in subregions:
+        draw_screen_poly(subregion[1], m, 'w') 
+        plt.annotate(subregion[0],xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center',va='center', fontsize=8) 
+    plt.show()
+    fig.savefig(directory+'map_subregion', bbox_inches='tight')
+
+def draw_screen_poly(boundary_array, m, linecolor='k'):
+
+    ''' Draw a polygon on a map
+
+    :param boundary_array: [lat_south, lat_north, lon_west, lon_east]
+    :param m   : Basemap object
+    '''
+
+    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
+    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
+    x, y = m( lons, lats )
+    xy = zip(x,y)
+    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
+    plt.gca().add_patch(poly)
+    
+    
+   
+
+    
+
+    
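
The helpers above can also be driven outside run_RCMES.py, provided the
reference and model datasets are already on a common grid (run_RCMES.py
handles the regridding). A sketch with illustrative paths and names:

    import ocw.data_source.local as local
    from metrics_and_plots import Taylor_diagram_spatial_pattern_of_multiyear_climatology

    # Hypothetical files; all datasets must share the same grid and time axis.
    obs = local.load_file('./data/cru_sample.nc', 'pr')
    models, names = local.load_multiple_files(file_path='./data/pr_Amon*',
                                              variable_name='pr')
    # Saves the plot via ocw.plotter.draw_taylor_diagram; note the helper also
    # replaces each dataset's values with its multi-year climatology in place.
    Taylor_diagram_spatial_pattern_of_multiyear_climatology(
        obs, 'CRU', models, names, 'taylor_demo')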

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml b/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
new file mode 100644
index 0000000..c6b96cf
--- /dev/null
+++ b/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
@@ -0,0 +1,44 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
+
+# RCMES will temporally subset the data between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used for the metrics calculation; if False, the individual monthly values are used.
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ./data/prec.*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots1:
+    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
+
+use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml b/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
new file mode 100644
index 0000000..de2d98e
--- /dev/null
+++ b/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_tas_DJF_mean_mean_1980-2003.nc
+
+# RCMES will temporally subset the data between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used for the metrics calculation; if False, the individual monthly values are used.
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ./data/temp*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: narccap_tas_DJF_subregion_interannual_variability_portrait_diagram
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/configuration_file_examples/run_RCMES.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/run_RCMES.py b/examples/configuration_file_examples/run_RCMES.py
new file mode 100644
index 0000000..6064c11
--- /dev/null
+++ b/examples/configuration_file_examples/run_RCMES.py
@@ -0,0 +1,216 @@
+# Apache OCW lib imports
+import ocw.dataset_processor as dsp
+import ocw.data_source.local as local
+import ocw.data_source.rcmed as rcmed
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.dataset import Bounds
+
+import matplotlib.pyplot as plt
+from matplotlib import rcParams
+import numpy as np
+import numpy.ma as ma
+import yaml
+from glob import glob
+import operator
+from dateutil import parser
+from datetime import datetime
+import os
+import sys
+
+from metrics_and_plots import *
+
+import ssl
+if hasattr(ssl, '_create_unverified_context'):
+  ssl._create_default_https_context = ssl._create_unverified_context
+
+config_file = str(sys.argv[1])
+
+print 'Reading the configuration file ', config_file
+config = yaml.load(open(config_file))
+time_info = config['time']
+temporal_resolution = time_info['temporal_resolution']
+
+start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
+end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
+
+space_info = config['space']
+min_lat = space_info['min_lat']
+max_lat = space_info['max_lat']
+min_lon = space_info['min_lon']
+max_lon = space_info['max_lon']
+
+""" Step 1: Load the reference data """
+ref_data_info = config['datasets']['reference']
+print 'Loading observation dataset:\n',ref_data_info
+ref_name = ref_data_info['data_name']
+if ref_data_info['data_source'] == 'local':
+    ref_dataset = local.load_file(ref_data_info['path'],
+                                  ref_data_info['variable'], name=ref_name)
+elif ref_data_info['data_source'] == 'rcmed':
+      ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
+                                            ref_data_info['parameter_id'],
+                                            min_lat, max_lat, min_lon, max_lon,
+                                            start_time, end_time)
+else:
+    print ' '
+    # TO DO: support ESGF
+
+ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
+
+""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
+model_data_info = config['datasets']['targets']
+print 'Loading model datasets:\n',model_data_info
+if model_data_info['data_source'] == 'local':
+    model_datasets, model_names = local.load_multiple_files(file_path = model_data_info['path'],
+                                                            variable_name =model_data_info['variable'])
+else:
+    print ' '
+    # TO DO: support RCMED and ESGF
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
+
+""" Step 3: Subset the data for temporal and spatial domain """
+# Create a Bounds object to use for subsetting
+if time_info['maximum_overlap_period']:
+    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
+    print 'Maximum overlap period'
+    print 'start_time:', start_time
+    print 'end_time:', end_time
+
+if temporal_resolution == 'monthly' and end_time.day !=1:
+    end_time = end_time.replace(day=1)
+if ref_data_info['data_source'] == 'rcmed':
+    min_lat = np.max([min_lat, ref_dataset.lats.min()])
+    max_lat = np.min([max_lat, ref_dataset.lats.max()])
+    min_lon = np.max([min_lon, ref_dataset.lons.min()])
+    max_lon = np.min([max_lon, ref_dataset.lons.max()])
+bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
+
+if ref_dataset.lats.ndim !=2 and ref_dataset.lons.ndim !=2:
+    ref_dataset = dsp.subset(bounds,ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    if dataset.lats.ndim !=2 and dataset.lons.ndim !=2:
+        model_datasets[idata] = dsp.subset(bounds,dataset)
+
+# Temporally subset both observation and model datasets for the user-specified season
+month_start = time_info['month_start']
+month_end = time_info['month_end']
+average_each_year = time_info['average_each_year']
+
+ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
+
+# generate grid points for regridding
+if config['regrid']['regrid_on_reference']:
+    new_lat = ref_dataset.lats
+    new_lon = ref_dataset.lons 
+else:
+    delta_lat = config['regrid']['regrid_dlat']
+    delta_lon = config['regrid']['regrid_dlon']
+    nlat = (max_lat - min_lat)/delta_lat+1
+    nlon = (max_lon - min_lon)/delta_lon+1
+    new_lat = np.linspace(min_lat, max_lat, nlat)
+    new_lon = np.linspace(min_lon, max_lon, nlon)
+
+# number of models
+nmodel = len(model_datasets)
+print 'Dataset loading completed'
+print 'Observation data:', ref_name 
+print 'Number of model datasets:',nmodel
+for model_name in model_names:
+    print model_name
+
+""" Step 4: Spatial regriding of the reference datasets """
+print 'Regridding datasets: ', config['regrid']
+if not config['regrid']['regrid_on_reference']:
+    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
+
+print 'Propagating missing data information'
+masked_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)
+ref_dataset, model_datasets = masked_datasets[0], masked_datasets[1:]
+
+""" Step 5: Checking and converting variable units """
+print 'Checking and converting variable units'
+ref_dataset = dsp.variable_unit_conversion(ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
+    
+
+print 'Generating multi-model ensemble'
+model_datasets.append(dsp.ensemble(model_datasets))
+model_names.append('ENS-models')
+
+""" Step 6: Generate subregion average and standard deviation """
+if config['use_subregions']:
+    # sort the subregion by region names and make a list
+    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
+
+    # number of subregions
+    nsubregion = len(subregions)
+
+    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
+
+    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions) 
+    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions) 
+
+""" Step 7: Write a netCDF file """
+workdir = config['workdir']
+if workdir[-1] != '/':
+    workdir = workdir+'/'
+print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
+if not os.path.exists(workdir):
+    os.system("mkdir "+workdir)
+
+if config['use_subregions']:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'],
+                                                       subregions=subregions, subregion_array = subregion_array, 
+                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
+                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
+else:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'])
+
+""" Step 8: Calculate metrics and draw plots """
+nmetrics = config['number_of_metrics_and_plots']
+if config['use_subregions']:
+    Map_plot_subregion(subregions, ref_dataset, workdir)
+
+if nmetrics > 0:
+    print 'Calculating metrics and generating plots'
+    for imetric in np.arange(nmetrics)+1:
+        metrics_name = config['metrics'+'%1d' %imetric]
+        plot_info = config['plots'+'%1d' %imetric]
+        file_name = workdir+plot_info['file_name']
+
+        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
+        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
+            row, column = plot_info['subplots_array']
+            Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                      file_name, row, column)
+        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
+            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                      file_name)
+        elif config['use_subregions']:
+            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
+            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
+            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name)
+            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name)
+        else:
+            print 'Please check the list of currently supported metrics'
+
+
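
One convention in the driver above worth spelling out: metric/plot pairs are
numbered configuration keys (metrics1/plots1, metrics2/plots2, ...) looked up
with a formatted index. A sketch of that lookup with a hypothetical two-entry
config:

    import numpy as np

    # Hypothetical dict standing in for the parsed YAML configuration.
    config = {'number_of_metrics_and_plots': 2,
              'metrics1': 'Map_plot_bias_of_multiyear_climatology',
              'plots1': {'file_name': 'bias_map'},
              'metrics2': 'Taylor_diagram_spatial_pattern_of_multiyear_climatology',
              'plots2': {'file_name': 'taylor'}}
    for imetric in np.arange(config['number_of_metrics_and_plots']) + 1:
        metrics_name = config['metrics%1d' % imetric]   # e.g. 'metrics1'
        plot_info = config['plots%1d' % imetric]        # dispatched by name in Step 8 above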

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml b/examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
deleted file mode 100644
index 042a9a3..0000000
--- a/examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex-AF_CRU_taxmax_monthly_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*tasmax.nc                                                    
-        variable: tasmax  
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-AF_tasmax_annual_mean_bias_to_cru
-    subplots_array: !!python/tuple [3,4] 
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/cordex-AF_tasmax_bias_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/cordex-AF_tasmax_bias_to_cru.yaml b/examples/cordex-AF_tasmax_bias_to_cru.yaml
deleted file mode 100644
index 042a9a3..0000000
--- a/examples/cordex-AF_tasmax_bias_to_cru.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex-AF_CRU_taxmax_monthly_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*tasmax.nc                                                    
-        variable: tasmax  
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-AF_tasmax_annual_mean_bias_to_cru
-    subplots_array: !!python/tuple [3,4] 
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
----------------------------------------------------------------------
diff --git a/examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml b/examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
deleted file mode 100644
index 9483cae..0000000
--- a/examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
+++ /dev/null
@@ -1,90 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex_AF_prec_monthly_mean_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1998-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU  
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*pr.nc                                
-        variable: pr    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Timeseries_plot_subregion_annual_cycle
-
-plots1:
-    file_name: cordex_AF_prec_subregion_annual_cycle_time_series
-    subplots_array: !!python/tuple [7,3]
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01:
-      [29.0, 36.5, -10.0, 0.0]
-    R02:
-      [29, 37.5, 0, 10]
-    R03:
-      [25, 32.5, 10, 20]
-    R04:
-      [25, 32.5, 20, 33]
-    R05:
-      [12, 20.0, -19.3, -10.2]
-    R06:
-      [15, 25.0, 15, 30]
-    R07:
-      [7.3, 15,  -10, 10]
-    R08:
-      [5, 7.3,  -10, 10]
-    R09:
-      [6.9, 15, 33.9, 40]
-    R10:
-      [2.2, 11.8, 44.2, 51.8]
-    R11:
-      [0, 10, 10, 25]
-    R12:
-      [-10, 0, 10, 25]
-    R13:
-      [-15, 0, 30, 40]
-    R14:
-      [-27.9, -21.4, 13.6, 20]
-    R15:
-      [-35, -27.9, 13.6, 20]
-    R16:
-      [-35, -21.4, 20, 35.7]
-    R17:
-      [-25.8, -11.7, 43.2, 50.3]
-    R18:
-      [25, 35.0, 33, 40]
-    R19:
-      [28, 35, 45, 50]
-    R20:
-      [13, 20.0, 43, 50]
-    R21:
-      [20, 27.5, 50, 58]

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/example_package.py
----------------------------------------------------------------------
diff --git a/examples/example_package.py b/examples/example_package.py
deleted file mode 100644
index b564da5..0000000
--- a/examples/example_package.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#Apache OCW lib immports
-import ocw.data_source.local as local
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.evaluation import Evaluation
-import ocw.metrics as metrics
-
-# Python libraries
-import numpy as np
-import numpy.ma as ma
-import matplotlib.pyplot as plt
-from mpl_toolkits.basemap import Basemap 
-from matplotlib import rcParams
-from matplotlib.patches import Polygon
-import string
-
-def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name, row, column):
-    '''Draw maps of observed multi-year climatology and biases of models"'''
-
-    # calculate climatology of observation data
-    obs_clim = utils.calc_temporal_mean(obs_dataset)
-    # determine the metrics
-    map_of_bias = metrics.TemporalMeanBias()
-
-    # create the Evaluation object
-    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
-                                 model_datasets, # list of target datasets for the evaluation
-                                 [map_of_bias, map_of_bias])
-
-    # run the evaluation (bias calculation)
-    bias_evaluation.run() 
-
-    rcm_bias = bias_evaluation.results[0]
-
-    fig = plt.figure()
-
-    lat_min = obs_dataset.lats.min()
-    lat_max = obs_dataset.lats.max()
-    lon_min = obs_dataset.lons.min()
-    lon_max = obs_dataset.lons.max()
-
-    string_list = list(string.ascii_lowercase) 
-    ax = fig.add_subplot(row,column,1)
-    m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
-
-    x,y = m(lons, lats)
-
-    m.drawcoastlines(linewidth=1)
-    m.drawcountries(linewidth=1)
-    m.drawstates(linewidth=0.5, color='w')
-    max = m.contourf(x,y,obs_clim,levels = plotter._nice_intervals(obs_dataset.values, 10), extend='both',cmap='PuOr')
-    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
-    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
-    plt.colorbar(max, cax = cax) 
-    clevs = plotter._nice_intervals(rcm_bias, 11)
-    for imodel in np.arange(len(model_datasets)):
-        ax = fig.add_subplot(row, column,2+imodel)
-        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-        m.drawcoastlines(linewidth=1)
-        m.drawcountries(linewidth=1)
-        m.drawstates(linewidth=0.5, color='w')
-        max = m.contourf(x,y,rcm_bias[imodel,:],levels = clevs, extend='both', cmap='RdBu_r')
-        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
-
-    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
-    plt.colorbar(max, cax = cax) 
-
-    plt.subplots_adjust(hspace=0.01,wspace=0.05)
-
-    plt.show()
-    fig.savefig(file_name,dpi=600,bbox_inches='tight')
-
-def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name):
-
-    # calculate climatological mean fields
-    obs_dataset.values = utils.calc_temporal_mean(obs_dataset)
-    for dataset in model_datasets:
-        dataset.values = utils.calc_temporal_mean(dataset)
-
-    # Metrics (spatial standard deviation and pattern correlation)
-    # determine the metrics
-    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
-
-    # create the Evaluation object
-    taylor_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
-                                 model_datasets, # list of target datasets for the evaluation
-                                 [taylor_diagram])
-
-    # run the evaluation (bias calculation)
-    taylor_evaluation.run() 
-
-    taylor_data = taylor_evaluation.results[0]
-
-    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right',frameon=False)
-
-def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle, 
-                          file_name, row, column, x_tick=['']):
-
-    nmodel, nt, nregion = model_subregion_mean.shape  
-
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-        
-    x_axis = np.arange(nt)
-    x_tick_values = x_axis
-
-    fig = plt.figure()
-    rcParams['xtick.labelsize'] = 6
-    rcParams['ytick.labelsize'] = 6
-  
-    for iregion in np.arange(nregion):
-        ax = fig.add_subplot(row, column, iregion+1) 
-        x_tick_labels = ['']
-        if iregion+1  > column*(row-1):
-            x_tick_labels = x_tick 
-        else:
-            x_tick_labels=['']
-        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
-        for imodel in np.arange(nmodel):
-            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
-        ax.set_xlim([-0.5,nt-0.5])
-        ax.set_xticks(x_tick_values)
-        ax.set_xticklabels(x_tick_labels)
-        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
-    
-    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center' , prop={'size':7}, frameon=False)  
-
-    fig.subplots_adjust(hspace=0.7, wspace=0.5)
-    plt.show()
-    fig.savefig(file_name, dpi=600, bbox_inches='tight')
-
-def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
-                               file_name, normalize=True):
-
-    nmodel, nt, nregion = model_subregion_mean.shape
-    
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-
-    subregion_metrics = ma.zeros([4, nregion, nmodel])
-
-    for imodel in np.arange(nmodel):
-        for iregion in np.arange(nregion):
-            # First metric: bias
-            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time = True)
-            # Second metric: standard deviation
-            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Third metric: RMSE
-            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Fourth metric: correlation
-            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-   
-    if normalize:
-        for iregion in np.arange(nregion):
-            subregion_metrics[0, iregion, : ] = subregion_metrics[0, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-            subregion_metrics[1, iregion, : ] = subregion_metrics[1, iregion, : ]*100. 
-            subregion_metrics[2, iregion, : ] = subregion_metrics[2, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-
-    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
-
-    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
-        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric, 
-                                      xlabel='model',ylabel='region')             
-
-def Map_plot_subregion(subregions, ref_dataset, directory):
-  
-    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
-    fig = plt.figure()
-    ax = fig.add_subplot(111)
-    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
-                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
-    m.drawcoastlines(linewidth=0.75)
-    m.drawcountries(linewidth=0.75)
-    m.etopo()  
-    x, y = m(lons, lats) 
-    #subregion_array = ma.masked_equal(subregion_array, 0)
-    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
-    for subregion in subregions:
-        draw_screen_poly(subregion[1], m, 'w') 
-        plt.annotate(subregion[0],xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center',va='center', fontsize=8) 
-    plt.show()
-    fig.savefig(directory+'map_subregion', bbox_inches='tight')
-
-def draw_screen_poly(boundary_array, m, linecolor='k'):
-
-    ''' Draw a polygon on a map
-
-    :param boundary_array: [lat_north, lat_south, lon_east, lon_west]
-    :param m   : Basemap object
-    '''
-
-    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
-    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
-    x, y = m( lons, lats )
-    xy = zip(x,y)
-    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
-    plt.gca().add_patch(poly)
-    
-    
-   
-
-    
-
-    

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml b/examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
deleted file mode 100644
index c6b96cf..0000000
--- a/examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml b/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
deleted file mode 100644
index c6b96cf..0000000
--- a/examples/narccap_prec_JJA_taylor_diagram_to_cru.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
----------------------------------------------------------------------
diff --git a/examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml b/examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
deleted file mode 100644
index de2d98e..0000000
--- a/examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_DJF_mean_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ./data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: narccap_tas_DJF_subregion_interannual_variability_portrait_diagram
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/knmi_to_cru31_full_bias.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/knmi_to_cru31_full_bias.py b/examples/old_examples/knmi_to_cru31_full_bias.py
deleted file mode 100644
index a241442..0000000
--- a/examples/old_examples/knmi_to_cru31_full_bias.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import urllib
-from os import path
-
-import numpy as np
-
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-from ocw.dataset import Bounds as Bounds
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# This way we can easily adjust the time span of the retrievals
-YEARS = 3
-# Two Local Model Files 
-MODEL = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "cru_31_tmax_knmi_africa_bias_full"
-
-# Download necessary NetCDF file if not present
-if not path.exists(MODEL):
-    urllib.urlretrieve(FILE_LEADER + MODEL, MODEL)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects """
-print("Loading %s into an OCW Dataset Object" % (MODEL,))
-knmi_dataset = local.load_file(MODEL, "tasmax")
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
-metadata = rcmed.get_parameters_metadata()
-
-cru_31 = [m for m in metadata if m['parameter_id'] == "39"][0]
-
-""" The RCMED API uses the following function to query, subset and return the 
-raw data from the database:
-
-rcmed.parameter_dataset(dataset_id, parameter_id, min_lat, max_lat, min_lon, 
-                        max_lon, start_time, end_time)
-
-The first two required params are in the cru_31 variable we defined earlier
-"""
-# Must cast to int since the rcmed api requires ints
-dataset_id = int(cru_31['dataset_id'])
-parameter_id = int(cru_31['parameter_id'])
-
-print("We are going to use the Model to constrain the Spatial Domain")
-#  The spatial_boundaries() function returns the spatial extent of the dataset
-print("The KNMI_Dataset spatial bounds (min_lat, max_lat, min_lon, max_lon) are: \n"
-      "%s\n" % (knmi_dataset.spatial_boundaries(), ))
-print("The KNMI_Dataset spatial resolution (lat_resolution, lon_resolution) is: \n"
-      "%s\n\n" % (knmi_dataset.spatial_resolution(), ))
-min_lat, max_lat, min_lon, max_lon = knmi_dataset.spatial_boundaries()
-
-print("Calculating the Maximum Overlap in Time for the datasets")
-
-cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
-cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.time_range()
-# Grab the Max Start Time
-start_time = max([cru_start, knmi_start])
-# Grab the Min End Time
-end_time = min([cru_end, knmi_end])
-print("Overlap computed to be: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
-print("We are going to grab the first %s year(s) of data" % YEARS)
-end_time = datetime.datetime(start_time.year + YEARS, start_time.month, start_time.day)
-print("Final Overlap is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
-
-print("Fetching data from RCMED...")
-cru31_dataset = rcmed.parameter_dataset(dataset_id,
-                                        parameter_id,
-                                        min_lat,
-                                        max_lat,
-                                        min_lon,
-                                        max_lon,
-                                        start_time,
-                                        end_time)
-
-""" Step 3: Resample Datasets so they are the same shape """
-print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s" % (knmi_dataset.values.shape,))
-print("Our two datasets have a mis-match in time. We will subset on time to %s years\n" % YEARS)
-
-# Create a Bounds object to use for subsetting
-new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
-
-print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
-print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
-
-print("Temporally Rebinning the Datasets to a Single Timestep")
-# To run FULL temporal Rebinning use a timedelta > 366 days.  I used 999 in this example
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=999))
-cru31_dataset = dsp.temporal_rebin(cru31_dataset, datetime.timedelta(days=999))
-
-print("KNMI_Dataset.values shape: %s" % (knmi_dataset.values.shape,))
-print("CRU31_Dataset.values shape: %s \n\n" % (cru31_dataset.values.shape,))
- 
-""" Spatially Regrid the Dataset Objects to a 1/2 degree grid """
-# Using the bounds we will create a new set of lats and lons on 0.5 degree step
-new_lons = np.arange(min_lon, max_lon, 0.5)
-new_lats = np.arange(min_lat, max_lat, 0.5)
- 
-# Spatially regrid datasets using the new_lats, new_lons numpy arrays
-print("Spatially Regridding the KNMI_Dataset...")
-knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
-print("Spatially Regridding the CRU31_Dataset...")
-cru31_dataset = dsp.spatial_regrid(cru31_dataset, new_lats, new_lons)
-print("Final shape of the KNMI_Dataset:%s" % (knmi_dataset.values.shape, ))
-print("Final shape of the CRU31_Dataset:%s" % (cru31_dataset.values.shape, ))
- 
-""" Step 4:  Build a Metric to use for Evaluation - Bias for this example """
-# You can build your own metrics, but OCW also ships with some common metrics
-print("Setting up a Bias metric to use for evaluation")
-bias = metrics.Bias()
-
-""" Step 5: Create an Evaluation Object using Datasets and our Metric """
-# The Evaluation Class Signature is:
-# Evaluation(reference, targets, metrics, subregions=None)
-# Evaluation can take in multiple targets and metrics, so we need to convert
-# our examples into Python lists.  Evaluation will iterate over the lists
-print("Making the Evaluation definition")
-bias_evaluation = evaluation.Evaluation(knmi_dataset, [cru31_dataset], [bias])
-print("Executing the Evaluation using the object's run() method")
-bias_evaluation.run()
- 
-""" Step 6: Make a Plot from the Evaluation.results """
-# The Evaluation.results are a set of nested lists to support many different
-# possible Evaluation scenarios.
-#
-# The Evaluation results docs say:
-# The shape of results is (num_target_datasets, num_metrics) if no subregion
-# Accessing the actual results when we have used 1 metric and 1 dataset is
-# done this way:
-print("Accessing the Results of the Evaluation run")
-results = bias_evaluation.results[0][0]
- 
-# From the bias output I want to make a Contour Map of the region
-print("Generating a contour map using ocw.plotter.draw_contour_map()")
- 
-lats = new_lats
-lons = new_lons
-fname = OUTPUT_PLOT
-gridshape = (1, 1)  # Using a 1 x 1 since we have a single Bias for the full time range
-plot_title = "TASMAX Bias of KNMI Compared to CRU 3.1 (%s - %s)" % (start_time.strftime("%Y/%m/%d"), end_time.strftime("%Y/%m/%d"))
-sub_titles = ["Full Temporal Range"]
- 
-plotter.draw_contour_map(results, lats, lons, fname,
-                         gridshape=gridshape, ptitle=plot_title, 
-                         subtitles=sub_titles)
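
One portability note on the script above: urllib.urlretrieve is Python 2 only, and it is the main thing tying these old examples to Python 2. A hedged sketch of the same download guard under Python 3, with the URL and filename copied from the constants above:

    from os import path
    from urllib.request import urlretrieve  # Python 3 home of urlretrieve

    FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
    MODEL = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"

    # Fetch the NetCDF input only when it is not already on disk.
    if not path.exists(MODEL):
        urlretrieve(FILE_LEADER + MODEL, MODEL)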

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/model_ensemble_to_rcmed.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/model_ensemble_to_rcmed.py b/examples/old_examples/model_ensemble_to_rcmed.py
deleted file mode 100644
index 1f653a1..0000000
--- a/examples/old_examples/model_ensemble_to_rcmed.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import math
-import urllib
-from os import path
-
-import numpy as np
-
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-from ocw.dataset import Bounds as Bounds
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# This way we can easily adjust the time span of the retrievals
-YEARS = 1
-# Two Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
-FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "tasmax_africa_bias_annual"
-
-# Download necessary NetCDF file if not present
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects """
-# Load local knmi model data
-knmi_dataset = local.load_file(FILE_1, "tasmax")
-knmi_dataset.name = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax"
-
-wrf311_dataset = local.load_file(FILE_2, "tasmax")
-wrf311_dataset.name = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax"
-
-
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
-metadata = rcmed.get_parameters_metadata()
-
-cru_31 = [m for m in metadata if m['parameter_id'] == "39"][0]
-
-""" The RCMED API uses the following function to query, subset and return the 
-raw data from the database:
-
-rcmed.parameter_dataset(dataset_id, parameter_id, min_lat, max_lat, min_lon, 
-                        max_lon, start_time, end_time)
-
-The first two required params are in the cru_31 variable we defined earlier
-"""
-# Must cast to int since the rcmed api requires ints
-dataset_id = int(cru_31['dataset_id'])
-parameter_id = int(cru_31['parameter_id'])
-
-#  The spatial_boundaries() function returns the spatial extent of the dataset
-min_lat, max_lat, min_lon, max_lon = wrf311_dataset.spatial_boundaries()
-
-#  There is a boundary alignment issue with the datasets.  To mitigate this
-#  we will use the math.floor() and math.ceil() functions to shrink the
-#  boundaries slightly.
-min_lat = math.ceil(min_lat)
-max_lat = math.floor(max_lat)
-min_lon = math.ceil(min_lon)
-max_lon = math.floor(max_lon)
-
-print("Calculating the Maximum Overlap in Time for the datasets")
-
-cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
-cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
-knmi_start, knmi_end = knmi_dataset.time_range()
-# Set the Time Range to be the year 1989
-start_time = datetime.datetime(1989,1,1)
-end_time = datetime.datetime(1989,12,1)
-
-print("Time Range is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
-                                          end_time.strftime("%Y-%m-%d")))
-
-print("Fetching data from RCMED...")
-cru31_dataset = rcmed.parameter_dataset(dataset_id,
-                                        parameter_id,
-                                        min_lat,
-                                        max_lat,
-                                        min_lon,
-                                        max_lon,
-                                        start_time,
-                                        end_time)
-
-""" Step 3: Resample Datasets so they are the same shape """
-
-print("Temporally Rebinning the Datasets to an Annual Timestep")
-# To run annual temporal Rebinning use a timedelta of 360 days.
-knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=360))
-wrf311_dataset = dsp.temporal_rebin(wrf311_dataset, datetime.timedelta(days=360))
-cru31_dataset = dsp.temporal_rebin(cru31_dataset, datetime.timedelta(days=360))
-
-# Running Temporal Rebin early helps negate the issue of datasets being on different 
-# days of the month (1st vs. 15th)
-# Create a Bounds object to use for subsetting
-new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-
-# Subset our model datasets so they are the same size
-knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
-wrf311_dataset = dsp.subset(new_bounds, wrf311_dataset)
-
-""" Spatially Regrid the Dataset Objects to a 1/2 degree grid """
-# Using the bounds we will create a new set of lats and lons on 1/2 degree step
-new_lons = np.arange(min_lon, max_lon, 0.5)
-new_lats = np.arange(min_lat, max_lat, 0.5)
- 
-# Spatially regrid datasets using the new_lats, new_lons numpy arrays
-knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
-wrf311_dataset = dsp.spatial_regrid(wrf311_dataset, new_lats, new_lons)
-cru31_dataset = dsp.spatial_regrid(cru31_dataset, new_lats, new_lons)
-
-# Generate an ensemble dataset from knmi and wrf models
-ensemble_dataset = dsp.ensemble([knmi_dataset, wrf311_dataset])
-
-""" Step 4:  Build a Metric to use for Evaluation - Bias for this example """
-print("Setting up a Bias metric to use for evaluation")
-bias = metrics.Bias()
-
-""" Step 5: Create an Evaluation Object using Datasets and our Metric """
-# The Evaluation Class Signature is:
-# Evaluation(reference, targets, metrics, subregions=None)
-# Evaluation can take in multiple targets and metrics, so we need to convert
-# our examples into Python lists.  Evaluation will iterate over the lists
-print("Making the Evaluation definition")
-bias_evaluation = evaluation.Evaluation(cru31_dataset, 
-                      [knmi_dataset, wrf311_dataset, ensemble_dataset],
-                      [bias])
-print("Executing the Evaluation using the object's run() method")
-bias_evaluation.run()
- 
-""" Step 6: Make a Plot from the Evaluation.results """
-# The Evaluation.results are a set of nested lists to support many different
-# possible Evaluation scenarios.
-#
-# The Evaluation results docs say:
-# The shape of results is (num_target_datasets, num_metrics) if no subregion
-# Accessing the actual results when we have used 3 datasets and 1 metric is
-# done this way:
-print("Accessing the Results of the Evaluation run")
-results = bias_evaluation.results
- 
-# From the bias output I want to make a Contour Map of the region
-print("Generating a contour map using ocw.plotter.draw_contour_map()")
- 
-lats = new_lats
-lons = new_lons
-fname = OUTPUT_PLOT
-gridshape = (3, 1)  # Using a 3 x 1 since we have a 1 year of data for 3 models
-plotnames = ["KNMI", "WRF311", "ENSEMBLE"]
-for i, result in enumerate(results):
-  plot_title = "TASMAX Bias of CRU 3.1 vs. %s (%s - %s)" % (plotnames[i], start_time.strftime("%Y/%m/%d"), end_time.strftime("%Y/%m/%d"))
-  output_file = "%s_%s" % (fname, plotnames[i].lower())
-  print("creating %s" % (output_file,))
-  plotter.draw_contour_map(result[0], lats, lons, output_file,
-                         gridshape=gridshape, ptitle=plot_title)
\ No newline at end of file
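
dsp.ensemble() in the script above folds the KNMI and WRF311 Datasets into the third "ENSEMBLE" target; as far as I can tell from the OCW sources of this era it is a plain gridpoint mean over the members. A minimal numpy sketch of that operation on hypothetical (time, lat, lon) arrays:

    import numpy as np

    # Hypothetical member fields in OCW's (time, lat, lon) layout.
    knmi_vals = np.random.rand(1, 45, 45)
    wrf311_vals = np.random.rand(1, 45, 45)

    # Gridpoint-wise mean across members; the shape is unchanged.
    ens_vals = np.mean(np.stack([knmi_vals, wrf311_vals]), axis=0)
    assert ens_vals.shape == knmi_vals.shape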

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/multi_model_evaluation.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/multi_model_evaluation.py b/examples/old_examples/multi_model_evaluation.py
deleted file mode 100644
index 8136001..0000000
--- a/examples/old_examples/multi_model_evaluation.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import urllib
-import numpy as np
-from os import path
-
-
-#import Apache OCW dependencies
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-from ocw.dataset import Bounds as Bounds
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-import ssl
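-# Disable HTTPS certificate verification globally so the rcmed fetch below
-# works on hosts that cannot validate the server's certificate chain.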
-if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-  
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "pr_africa_bias_annual"
-#variable that we are analyzing
-varName = 'pr' 
-# Spatial and temporal configurations
-LAT_MIN = -45.0 
-LAT_MAX = 42.24
-LON_MIN = -24.0
-LON_MAX = 60.0 
-START = datetime.datetime(2000, 1, 1)
-END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
-
-#list for all target_datasets
-target_datasets =[]
-#list for names for all the datasets
-allNames =[]
-
-
-# Download necessary NetCDF file if not present
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="UC"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
-
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-""" Step 3: Resample Datasets so they are the same shape """
-print("Resampling datasets")
-CRU31 = dsp.water_flux_unit_conversion(CRU31)
-CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
-
-for member, each_target_dataset in enumerate(target_datasets):
-  target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
-  target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-  target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30))    
-    
-
-""" Spatially Regrid the Dataset Objects to a user defined  grid """
-# Using the bounds we will create a new set of lats and lons 
-print("Regridding datasets")
-new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
-new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
-CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
-
-for member, each_target_dataset in enumerate(target_datasets):
-  target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-
-#make the model ensemble
-target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
-
-#append to the target_datasets for final analysis
-target_datasets.append(target_datasets_ensemble)
-
-# Compute the multi-year mean with utils.calc_climatology_year() and keep a
-# leading time axis of length 1 so the metric sees (time, lat, lon) arrays.
-_, CRU31.values = utils.calc_climatology_year(CRU31)
-CRU31.values = np.expand_dims(CRU31.values, axis=0)
-
-for member, each_target_dataset in enumerate(target_datasets):
-  _,target_datasets[member].values = utils.calc_climatology_year(target_datasets[member])
-  target_datasets[member].values = np.expand_dims(target_datasets[member].values, axis=0)
-
-
-for target in target_datasets:
-  allNames.append(target.name)
-
-#determine the metrics
-mean_bias = metrics.Bias()
-
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # list of target datasets for the evaluation
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [mean_bias])   
-RCMs_to_CRU_evaluation.run()
-
-# Extract the relevant data from RCMs_to_CRU_evaluation.results.
-# results is nested as (num_target_datasets, num_metrics); see docs for details.
-# Note: results[:][0] only copies the outer list and takes the first target,
-# so pull the first metric from each target explicitly.
-rcm_bias = [r[0] for r in RCMs_to_CRU_evaluation.results]
-# remove the metric (and singleton time) dimensions for plotting
-new_rcm_bias = np.squeeze(np.array(RCMs_to_CRU_evaluation.results))
-
-plotter.draw_contour_map(new_rcm_bias, new_lats, new_lons, gridshape=(2, 5),fname=OUTPUT_PLOT, subtitles=allNames, cmap='coolwarm_r')
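
The squeeze at the end of the script above leans on how Evaluation.results nests: one entry per target dataset, each holding one entry per metric, with each Bias map still carrying the length-1 time axis added by np.expand_dims. A small sketch with fabricated shapes showing what draw_contour_map() ends up iterating over:

    import numpy as np

    n_targets, nlat, nlon = 4, 20, 30        # KNMI, UC, UCT, ENS; toy grid
    # results nests as (num_target_datasets, num_metrics).
    results = [[np.zeros((1, nlat, nlon))] for _ in range(n_targets)]

    stacked = np.squeeze(np.array(results))  # drop the metric and time axes
    print(stacked.shape)                     # (4, 20, 30)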

http://git-wip-us.apache.org/repos/asf/climate/blob/fd310f40/examples/old_examples/multi_model_taylor_diagram.py
----------------------------------------------------------------------
diff --git a/examples/old_examples/multi_model_taylor_diagram.py b/examples/old_examples/multi_model_taylor_diagram.py
deleted file mode 100644
index f91ab3e..0000000
--- a/examples/old_examples/multi_model_taylor_diagram.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#Apache OCW lib imports
-from ocw.dataset import Dataset, Bounds
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.utils as utils
-
-import datetime
-import urllib
-import numpy as np
-
-from os import path
-
-# File URL leader
-FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
-# Three Local Model Files 
-FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
-FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
-# Filename for the output image/plot (without file extension)
-OUTPUT_PLOT = "pr_africa_taylor"
-
-# Spatial and temporal configurations
-LAT_MIN = -45.0 
-LAT_MAX = 42.24
-LON_MIN = -24.0
-LON_MAX = 60.0 
-START = datetime.datetime(2000, 1, 1)
-END = datetime.datetime(2007, 12, 31)
-EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-#variable that we are analyzing
-varName = 'pr' 
-
-#regridding parameters
-gridLonStep=0.5
-gridLatStep=0.5
-
-#some vars for this evaluation
-target_datasets_ensemble=[]
-target_datasets =[]
-ref_datasets =[]
-
-# Download necessary NetCDF file if not present
-if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
-
-if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
-
-if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
-
-""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
-target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
-target_datasets.append(local.load_file(FILE_2, varName, name="REGM3"))
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
-
-
-""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
-print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
-# the dataset_id and the parameter id were determined from  
-# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
-CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
-
-""" Step 3: Resample Datasets so they are the same shape """
-print("Resampling datasets ...")
-print("... on units")
-CRU31 = dsp.water_flux_unit_conversion(CRU31)
-print("... temporal")
-CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
-
-for member, each_target_dataset in enumerate(target_datasets):
-    target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-    target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30))
-    target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
-
-#Regrid
-print("... regrid")
-new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
-new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
-CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
-
-for member, each_target_dataset in enumerate(target_datasets):
-    target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
-
-# Reduce each dataset to a climatology with utils.calc_climatology_year()
-# and keep a leading time axis of length 1 for the metrics below.
-CRU31.values,_ = utils.calc_climatology_year(CRU31)
-CRU31.values = np.expand_dims(CRU31.values, axis=0)
-
-#make the model ensemble
-target_datasets_ensemble = dsp.ensemble(target_datasets)
-target_datasets_ensemble.name="ENS"
-
-#append to the target_datasets for final analysis
-target_datasets.append(target_datasets_ensemble)
-
-for member, each_target_dataset in enumerate(target_datasets):
-    target_datasets[member].values, _ = utils.calc_climatology_year(target_datasets[member])
-    target_datasets[member].values = np.expand_dims(target_datasets[member].values, axis=0)
-
-allNames =[]
-
-for target in target_datasets:
-    allNames.append(target.name)
-
-#calculate the metrics
-pattern_correlation = metrics.PatternCorrelation()
-spatial_std_dev = metrics.StdDevRatio()
-
-
-#create the Evaluation object
-RCMs_to_CRU_evaluation = evaluation.Evaluation(CRU31, # Reference dataset for the evaluation
-                                    # 1 or more target datasets for the evaluation                
-                                    target_datasets,
-                                    # 1 or more metrics to use in the evaluation
-                                    [spatial_std_dev, pattern_correlation])
-RCMs_to_CRU_evaluation.run()
-
-rcm_std_dev = [results[0] for results in RCMs_to_CRU_evaluation.results]
-rcm_pat_cor = [results[1] for results in RCMs_to_CRU_evaluation.results]
-
-taylor_data = np.array([rcm_std_dev, rcm_pat_cor]).transpose()
-
-new_taylor_data = np.squeeze(np.array(taylor_data))
-
-plotter.draw_taylor_diagram(new_taylor_data,
-                        allNames, 
-                        "CRU31",
-                        fname=OUTPUT_PLOT,
-                        fmt='png',
-                        frameon=False)
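
The Taylor diagram above plots each model as (standard-deviation ratio, pattern correlation) against the CRU31 reference. A minimal numpy sketch of those two coordinates, assuming OCW's StdDevRatio divides the target's standard deviation by the reference's (the usual Taylor-diagram convention):

    import numpy as np

    # Hypothetical flattened climatology fields for one model and the reference.
    rng = np.random.default_rng(0)
    model = rng.random(1000)
    ref = rng.random(1000)

    std_dev_ratio = model.std() / ref.std()       # radial coordinate
    pattern_corr = np.corrcoef(model, ref)[0, 1]  # azimuthal coordinate
    print(std_dev_ratio, pattern_corr)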
-