Posted to commits@climate.apache.org by hu...@apache.org on 2016/01/21 22:51:59 UTC

[1/7] climate git commit: CLIMATE-720 - Revise file structure

Repository: climate
Updated Branches:
  refs/heads/master 8bc19c65a -> d9e3c7e73


http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/ocw-cli/cli_app.py
----------------------------------------------------------------------
diff --git a/ocw-cli/cli_app.py b/ocw-cli/cli_app.py
deleted file mode 100644
index 60f5219..0000000
--- a/ocw-cli/cli_app.py
+++ /dev/null
@@ -1,1438 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import curses
-import sys
-import os
-import numpy as np
-import getpass
-import urllib2
-import json
-
-from netCDF4 import Dataset
-from datetime import datetime, timedelta
-
-import ocw.metrics as metrics
-import ocw.plotter as plotter
-import ocw.dataset_processor as dsp
-import ocw.evaluation as evaluation
-import ocw.data_source.rcmed as rcmed
-from ocw.dataset import Bounds
-from ocw.data_source.local import load_file
-import ocw.utils as utils
-import ocw.data_source.esgf as esgf
-from ocw_config_runner.configuration_writer import export_evaluation_to_config
-
-import ssl
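-# Disable certificate verification for the default HTTPS context so downloads
-# from servers whose certificates fail verification (e.g. some ESGF data nodes)
-# do not abort with an SSL error.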
-if hasattr(ssl, '_create_unverified_context'):
-    ssl._create_default_https_context = ssl._create_unverified_context
-
-def ready_screen(page, note=""):
-    ''' Generates page borders, header, footer and notification center.
-
-    :param page: Name of current page
-    :type page: string
-    :param note: Notification returned by the system, shown at the
-         bottom of the page
-    :type note: string
-
-    :returns: y and x, the screen dimensions (rows and columns)
-    :rtype: (integer, integer)
-    '''
-
-    screen.clear()
-    y, x = screen.getmaxyx()
-    screen.border(0)
-    screen.addstr(0, x/2-len(TITLE)/2, TITLE)
-    screen.addstr(y-1, x/2-len(ORGANIZATION)/2, ORGANIZATION)
-    screen.addstr(y-3, 1, "Notification:")
-    for each in range(1, x-1):
-         screen.addstr(y-4, each, "-")
-    if page == "main_menu":
-         screen.addstr(y-3, x-21, "(NC) = Not complete")
-         screen.addstr(y-2, x-21, "(C)  = Complete")
-    if page == "settings_screen":
-         for i in range(y-5):
-              screen.addstr(i+1, x/2-2, ".")
-    screen.addstr(y-2, 1, note)
-
-    return y, x
-
-
-def get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable):
-    dataset_info = esgf._get_file_download_data(esgf_dataset_id, esgf_variable)
-    netCDF_name = dataset_info[0][0].split("/")[-1]
-
-    return netCDF_name
-
-
-##############################################################
-#         Manage Model Screen
-##############################################################
-
-def load_local_model_screen(header):
-    '''Generates screen for loading a local model file.
-    The path to the model file (netCDF) and the variable name are required.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("load_local_model_screen")
-    screen.addstr(1, 1, header + " > Load Local Model File ")
-    screen.addstr(4, 2, "Enter model path: ")
-    model_path = screen.getstr()
-    try:
-         netCDF_file = Dataset(model_path, 'r')
-         all_netcdf_variables = [variable.encode() for variable in netCDF_file.variables.keys()]
-         try:
-              screen.addstr(6, 2, "Enter model variable name {0}: ".format(all_netcdf_variables))
-              variable_name = screen.getstr()
-              screen.addstr(7, 4, "{0}".format(netCDF_file.variables[variable_name]))
-              screen.addstr(20, 2, "Confirm:")
-              screen.addstr(21, 4, "0- No")
-              screen.addstr(22, 4, "1- Yes")
-              screen.addstr(23, 3, "Would you take this variable:")
-              answer = screen.getstr()
-              if answer == "0":
-                   note = "WARNING: Model file cannot be added."
-              elif answer == "1":
-                   model_dataset = load_file(model_path, variable_name)
-                   model_datasets.append(model_dataset)
-                   models_info.append({'directory': model_path, 'variable_name': variable_name})
-                   note = "Model file successfully added."
-              else:
-                   note = "WARNING: Model file cannot be added."
-         except:
-              note = "WARNING: Model file cannot be added. The variable [{0}] is not accepted. Please try again.".format(variable_name)
-         netCDF_file.close()
-    except:
-         note = "WARNING: Model file cannot be read. Please check the file directory or format. Only netCDF format is accepted."
-
-    return note
-
-
-def load_esgf_model_screen(header):
-    '''Generates screen to be able to load ESGF model file.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("load_esgf_model_screen")
-    screen.addstr(1, 1, header + " > Download ESGF Dataset ")
-    screen.addstr(6, 1, "Enter Dataset ID:")
-    esgf_dataset_id = screen.getstr()
-    screen.addstr(7, 1, "Enter Variable:")
-    esgf_variable = screen.getstr()
-    screen.addstr(8, 1, "Enter Username:")
-    esgf_username = screen.getstr()
-    screen.addstr(9, 1, "Enter Password:")
-    esgf_password = screen.getstr()
-    try:
-        solr_url = "http://esg-datanode.jpl.nasa.gov/esg-search/search?id={0}&variable={1}&format=application%2Fsolr%2Bjson".format(esgf_dataset_id, esgf_variable)
-        metadata_json = json.load(urllib2.urlopen(solr_url))
-        if metadata_json['response']['docs'][0]["product"][0] != "observations":
-            screen.addstr(11, 4, "Title: {0}".format(metadata_json['response']['docs'][0]['title']))
-            screen.addstr(12, 4, "Start Date: {0}".format(metadata_json['response']['docs'][0]['datetime_start']))
-            screen.addstr(13, 4, "End Date: {0}".format(metadata_json['response']['docs'][0]['datetime_stop']))
-            screen.addstr(15, 2, "Confirm:")
-            screen.addstr(16, 4, "0- No")
-            screen.addstr(17, 4, "1- Yes")
-            screen.addstr(18, 3, "Would you take this dataset:")
-            answer = screen.getstr()
-            if answer == "0":
-                note = "WARNING: ESGF model file cannot be added."
-            elif answer == "1":
-                try:
-                    screen.addstr(20, 4, "Downloading dataset.....")
-                    screen.refresh()
-                    datasets = esgf.load_dataset(esgf_dataset_id,
-                                                esgf_variable,
-                                                esgf_username,
-                                                esgf_password)
-                    netCDF_name = get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable)
-                    netCDF_path = "/tmp/{0}".format(netCDF_name)
-                    model_dataset = load_file(netCDF_path, esgf_variable)
-                    model_datasets.append(model_dataset)
-                    models_info.append({'directory': netCDF_path, 'variable_name': esgf_variable})
-                    note = "Dataset successfully downloaded."
-                except:
-                    note = "WARNING: Dataset has not been downloaded. Check your ESGF permission."
-        else:
-            note = "The selected dataset is an observation; please enter a model dataset."
-    except:
-        note = "WARNING: Something went wrong in downloading model dataset from ESGF."
-
-    return note
-
-
-def unload_model_screen(header):
-    '''Generates screen for unloading a model file.
-    It lists all loaded models with an index for each.
-    Selecting a model by its index removes it from the list of models.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("unload_model_screen")
-    screen.addstr(1, 1, header + " > Unload Model File")
-    screen.addstr(6, 1, "List of Models:")
-    for i, model in enumerate(models_info):
-         screen.addstr(8 + i, 10, "Model Number:[{0}] - Model path:[{1}] - Variables:[{2}]".format(str(i), model['directory'], model['variable_name']))
-    screen.addstr(3, 2, "Select the model number to remove (press enter to go back): ")
-    try:
-         model_remove_index = screen.getstr()
-         models_info.pop(int(model_remove_index))
-         model_datasets.pop(int(model_remove_index))
-         note = "Model file unloaded successfully"
-    except:
-         note = "WARNING: Model file not unloaded successfully."
-
-    return note
-
-
-def list_model_screen(header):
-    '''Generates screen to list all model files.
-
-    :param header: Header of page
-    :type header: string
-    '''
-
-    ready_screen("list_model_screen")
-    screen.addstr(1, 1, header + " > List Model File ")
-    screen.addstr(6, 6, "List of model(s): ")
-    for i, model in enumerate(models_info):
-         screen.addstr(8 + i, 10, "Model Number:[{0}] - Model path:[{1}] - Variables:[{2}]".format(str(i), model['directory'], model['variable_name']))
-    screen.addstr(4, 4, "Return to Manage Model (press Enter) :")
-    screen.getstr()
-
-
-def manage_model_screen(header, note=""):
-    '''Generates Manage Model screen.
-
-    :param header: Header of page
-    :type header: string
-    :param note: Notification, defaults to empty string.
-    :type note: string
-    '''
-
-    option = ''
-    while option != '0':
-         ready_screen("manage_model_screen", note)
-         screen.addstr(1, 1, header)
-         screen.addstr(4, 4, "1 - Load Local Model File")
-         screen.addstr(6, 4, "2 - Load ESGF Model File")
-         screen.addstr(8, 4, "3 - Unload Model File")
-         screen.addstr(10, 4, "4 - List Model File")
-         screen.addstr(12, 4, "0 - Return to Main Menu")
-         screen.addstr(14, 2, "Select an option: ")
-         screen.refresh()
-         option = screen.getstr()
-
-         if option == '1':
-              note = load_local_model_screen(header)
-         if option == '2':
-              note = load_esgf_model_screen(header)
-         if option == '3':
-              note = unload_model_screen(header)
-         if option == '4':
-              note = list_model_screen(header)
-              note = " "
-
-
-##############################################################
-#     Manage Observation Screen
-##############################################################
-
-def select_obs_screen(header):   #TODO: if the observation is already selected, don't select again.
-    '''Generates screen to select observation.
-    It retrieves the list of observations from the database and builds a table from it.
-    The user selects an observation by dataset_id and parameter_id.
-    If the terminal screen is too small to show the whole table, a notification with a link to the parameter table on the website is shown instead.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("select_obs_screen")
-    screen.addstr(1, 1, header + " > Select Observation ")
-    screen.addstr(7, 1, "Observations Table: ")
-    screen.addstr(8, 2, "|D-ID| - |P-ID| - |Database")
-    screen.addstr(9, 2, "|----| - |----| - |--------")
-    all_obs_info = rcmed.get_parameters_metadata()
-    new_all_obs_info = []
-    for each in all_obs_info:
-        if not each['parameter_id'] in ['72', '73', '74', '75', '80', '42', '81', '84', '85', '86', '89', '90', '91', '94', '95', '96', '97', '98', '99', '100', '101', '103', '106']:
-            new_all_obs_info.append(each)
-    all_obs_info = new_all_obs_info
-    del new_all_obs_info
-    try:
-         for position, obs_info in enumerate(all_obs_info):
-            dataset_id = obs_info['dataset_id']
-            parameter_id = obs_info['parameter_id']
-            database = obs_info['database']
-            line = "|{0:>4}| - |{1:>4}| - |{2}".format(dataset_id, parameter_id, database)
-            if position <= 25:
-                 screen.addstr(10 + position, 2, line)
-            elif position > 25 and position <= 50:
-                 screen.addstr(8, 50, "|D-ID| - |P-ID| - |Database")
-                 screen.addstr(9, 50, "|----| - |----| - |--------")
-                 screen.addstr(10 + position - 26, 50, line)
-            else:
-                 screen.addstr(8, 100, "|D-ID| - |P-ID| - |Database")
-                 screen.addstr(9, 100, "|----| - |----| - |--------")
-                 screen.addstr(10 + position - 51, 100, line)
-    except:
-         ready_screen("select_obs_screen")
-         screen.addstr(1, 1, header + " > Select Observation ")
-         screen.addstr(10, 1, "Observation table cannot be shown due to small screen size. ")
-         screen.addstr(11, 1, "Please enlarge your screen and try again or refer to 'https://rcmes.jpl.nasa.gov/content/data-rcmes-database'. ")
-    try:
-         screen.addstr(2, 1, "More info for observation: https://rcmes.jpl.nasa.gov/content/data-rcmes-database")
-         screen.addstr(4, 2, "Enter Dataset ID (D-ID): ")
-         dataset_id = screen.getstr()
-         screen.addstr(5, 2, "Enter Parameter ID (P-ID): ")
-         parameter_id = screen.getstr()
-
-         for obs in all_obs_info:
-              if obs['dataset_id'] == dataset_id and obs['parameter_id'] == parameter_id:
-                   observations_info.append({
-                        'database':obs['database'],
-                        'dataset_id':dataset_id,
-                        'parameter_id':parameter_id,
-                        'start_date':obs['start_date'],
-                        'end_date':obs['end_date'],
-                        'bounding_box':obs['bounding_box'],
-                        'timestep':obs['timestep'],
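-                        # Corner indices below assume bounding_box lists corner 0 as
-                        # (max_lat, max_lon) and corner 2 as (min_lat, min_lon).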
-                        'min_lat':float(eval(obs['bounding_box'].encode())[2][0]) if obs['bounding_box'] else None,
-                        'max_lat':float(eval(obs['bounding_box'].encode())[0][0]) if obs['bounding_box'] else None,
-                        'min_lon':float(eval(obs['bounding_box'].encode())[2][1]) if obs['bounding_box'] else None,
-                        'max_lon':float(eval(obs['bounding_box'].encode())[0][1]) if obs['bounding_box'] else None,
-                        'lat_res':float(obs['lat_res'].encode()),
-                        'lon_res':float(obs['lon_res'].encode()),
-                        'unit':obs['units']
-                        })
-                   note = "Observation successfully selected."
-                   break
-              else:
-                   note = "WARNING: Observation cannot be selected. There is no observation with given info."
-    except:
-         note = "WARNING: Observation cannot be selected, dataset or parameter id is wrong."
-
-    return note
-
-
-def load_esgf_obs_screen(header):
-    '''Generates screen to be able to load ESGF observation file.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("load_esgf_obs_screen")
-    screen.addstr(1, 1, header + " > Download ESGF Dataset ")
-    screen.addstr(6, 1, "Enter Dataset ID:")
-    esgf_dataset_id = screen.getstr()
-    screen.addstr(7, 1, "Enter Variable:")
-    esgf_variable = screen.getstr()
-    screen.addstr(8, 1, "Enter Username:")
-    esgf_username = screen.getstr()
-    screen.addstr(9, 1, "Enter Password:")
-    esgf_password = screen.getstr()
-    try:
-        solr_url = "http://esg-datanode.jpl.nasa.gov/esg-search/search?id={0}&variable={1}&format=application%2Fsolr%2Bjson".format(esgf_dataset_id, esgf_variable)
-        metadata_json = json.load(urllib2.urlopen(solr_url))
-        all_variables = metadata_json['response']['docs'][0]['variable']
-        variable_index = all_variables.index(esgf_variable)
-        if metadata_json['response']['docs'][0]["product"][0] == "observations":
-            screen.addstr(11, 4, "Variable Long Name: {0}".format(metadata_json['response']['docs'][0]['variable_long_name'][variable_index]))
-            screen.addstr(12, 4, "Start Date: {0}".format(metadata_json['response']['docs'][0]['datetime_start']))
-            screen.addstr(13, 4, "End Date: {0}".format(metadata_json['response']['docs'][0]['datetime_stop']))
-            screen.addstr(14, 4, "Time Frequency: {0}".format(metadata_json['response']['docs'][0]['time_frequency']))
-            screen.addstr(15, 4, "Variable Units: {0}".format(metadata_json['response']['docs'][0]['variable_units'][variable_index]))
-            screen.addstr(16, 4, "East Degrees: {0}".format(metadata_json['response']['docs'][0]['east_degrees']))
-            screen.addstr(17, 4, "North Degrees: {0}".format(metadata_json['response']['docs'][0]['north_degrees']))
-            screen.addstr(18, 4, "South Degrees: {0}".format(metadata_json['response']['docs'][0]['south_degrees']))
-            screen.addstr(19, 4, "West Degrees: {0}".format(metadata_json['response']['docs'][0]['west_degrees']))
-            screen.addstr(22, 2, "Confirm:")
-            screen.addstr(23, 4, "0- No")
-            screen.addstr(24, 4, "1- Yes")
-            screen.addstr(25, 3, "Would you take this dataset:")
-            answer = screen.getstr()
-            if answer == "0":
-                note = "WARNING: ESGF observation file cannot be added."
-            elif answer == "1":
-                try:
-                    screen.addstr(27, 4, "Downloading dataset.....")
-                    screen.refresh()
-                    datasets = esgf.load_dataset(esgf_dataset_id,
-                                                esgf_variable,
-                                                esgf_username,
-                                                esgf_password)
-                    netCDF_name = get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable)
-                    netCDF_path = "/tmp/{0}".format(netCDF_name)
-                    obs_dataset = load_file(netCDF_path, esgf_variable)
-                    observations_info.append({
-                     'database':"{0}".format(netCDF_path),
-                     'dataset_id':"esgf",
-                     'parameter_id':"{0}".format(esgf_variable),
-                     'start_date': obs_dataset.time_range()[0].strftime("%Y-%m-%d"),
-                     'end_date':obs_dataset.time_range()[1].strftime("%Y-%m-%d"),
-                     #'bounding_box':obs['bounding_box'],
-                     'timestep':"monthly",
-                     'min_lat':obs_dataset.spatial_boundaries()[0],
-                     'max_lat':obs_dataset.spatial_boundaries()[1],
-                     'min_lon':obs_dataset.spatial_boundaries()[2],
-                     'max_lon':obs_dataset.spatial_boundaries()[3],
-                     'lat_res':obs_dataset.spatial_resolution()[0],
-                     'lon_res':obs_dataset.spatial_resolution()[1],
-                     'unit':"{0}".format(metadata_json['response']['docs'][0]['variable_units'][variable_index])
-                     })
-                    note = "Dataset successfully downloaded."
-                except:
-                    note = "WARNING: Dataset has not been downloaded."
-        else:
-            note = "The selected dataset is not an observation; please enter an observation dataset."
-    except:
-        note = "WARNING: Something went wrong in downloading observation dataset from ESGF."
-
-    return note
-
-
-def unselect_obs_screen(header):
-    '''Generates screen for unselecting observations.
-    Observations are unselected by entering the index allocated to them.
-
-    :param header: Header of page
-    :type header: string
-
-    :returns: Notification
-    :rtype: string
-    '''
-
-    ready_screen("unselect_obs_screen")
-    screen.addstr(1, 1, header + " > Unselect Observation ")
-    screen.addstr(6, 1, "List Observation(s):")
-    for i, obs_info in enumerate(observations_info):
-         screen.addstr(8 + i, 10, " [" + str(i) + "] : " + " Dataset ID: " + obs_info['dataset_id'] + " - Parameter ID: "+ obs_info['parameter_id'] + " - Database: "+ obs_info['database'])
-    screen.addstr(3, 2, "Select the observation to remove (press enter to go back): ")
-    try:
-         obs_remove_index = screen.getstr()
-         observations_info.pop(int(obs_remove_index))
-         note = "Observation successfully unselected."
-    except:
-         note = "WARNING: Unselecting observation was not successful."
-
-    return note
-
-
-def list_obs_screen(header):
-    '''Generates screen to list observations.
-
-    :param header: Header of page
-    :type header: string
-    '''
-
-    ready_screen("list_obs_screen")
-    screen.addstr(1, 1, header + " > List Observation ")
-    screen.addstr(6, 6, "List of observation(s): ")
-    for i, obs_info in enumerate(observations_info):
-         screen.addstr(8 + i, 10, " [" + str(i) + "] : " + " Dataset ID: " + obs_info['dataset_id'] + " - Parameter ID: "+ obs_info['parameter_id'] + " - Database: "+ obs_info['database'])
-    screen.addstr(4, 4, "Return to Manage Observation (press Enter) :")
-    screen.getstr()
-
-
-def manage_obs_screen(header, note=""):
-    '''Generates Manage Observation screen.
-
-    :param header: Header of page
-    :type header: string
-    :param note: Notification, defaults to empty string.
-    :type note: string
-    '''
-
-    option = ''
-    while option != '0':
-         ready_screen("manage_obs_screen", note)
-         screen.addstr(1, 1, header)
-         screen.addstr(4, 4, "1 - Select Observation")
-         screen.addstr(6, 4, "2 - Load ESGF Observation")
-         screen.addstr(8, 4, "3 - Unselect Observation")
-         screen.addstr(10, 4, "4 - List Observation")
-         screen.addstr(12, 4, "0 - Return to Main Menu")
-         screen.addstr(14, 2, "Select an option: ")
-         screen.refresh()
-
-         option = screen.getstr()
-         if option == '1':
-              note = select_obs_screen(header)
-         if option == '2':
-              note = load_esgf_obs_screen(header)
-         if option == '3':
-              note = unselect_obs_screen(header)
-         if option == '4':
-              list_obs_screen(header)
-              note = " "
-
-
-##############################################################
-#     Run Evaluation Screen
-##############################################################
-
-def run_screen(model_datasets, models_info, observations_info,
-               overlap_start_time, overlap_end_time, overlap_min_lat,
-               overlap_max_lat, overlap_min_lon, overlap_max_lon,
-               temp_grid_setting, spatial_grid_setting_lat, spatial_grid_setting_lon, reference_dataset, target_datasets, metric, working_directory, plot_title):
-    '''Generates screen to show running evaluation process.
-
-    :param model_datasets: list of model dataset objects
-    :type model_datasets: list
-    :param models_info: list of dictionaries that contain information for each model
-    :type models_info: list
-    :param observations_info: list of dictionaries that contain information for each observation
-    :type observations_info: list
-    :param overlap_start_time: start of the temporal overlap between models and observations
-    :type overlap_start_time: datetime
-    :param overlap_end_time: end of the temporal overlap between models and observations
-    :type overlap_end_time: datetime
-    :param overlap_min_lat: overlap minimum lat between model and obs minimum lat
-    :type overlap_min_lat: float
-    :param overlap_max_lat: overlap maximum lat between model and obs maximum lat
-    :type overlap_max_lat: float
-    :param overlap_min_lon: overlap minimum lon between model and obs minimum lon
-    :type overlap_min_lon: float
-    :param overlap_max_lon: overlap maximum lon between model and obs maximum lon
-    :type overlap_max_lon: float
-    :param temp_grid_setting: temporal grid option such as hourly, daily, monthly and annually
-    :type temp_grid_setting: string
-    :param spatial_grid_setting_lat: latitude resolution for spatial regridding
-    :type spatial_grid_setting_lat: float
-    :param spatial_grid_setting_lon: longitude resolution for spatial regridding
-    :type spatial_grid_setting_lon: float
-    :param reference_dataset: name of the reference dataset (e.g. 'obs0')
-    :type reference_dataset: string
-    :param target_datasets: names of all target datasets
-    :type target_datasets: list
-    :param metric: name of selected metric
-    :type metric: string
-    :param working_directory: path to a directory for storing outputs
-    :type working_directory: string
-    :param plot_title: Title for plot
-    :type plot_title: string
-    '''
-    try:
-        target_datasets_ensemble = []
-        new_model_datasets = model_datasets[:]
-
-        option = None
-        if option != "0":
-             ready_screen("run_evaluation_screen")
-             y = screen.getmaxyx()[0]
-             screen.addstr(2, 2, "Evaluation started....")
-             screen.refresh()
-
-             screen.addstr(4, 4, "Retrieving data...")
-             screen.refresh()
-             obs_dataset = []
-             for i in range(len(observations_info)):
-                  if observations_info[i]['dataset_id'] == "esgf":
-                      obs_dataset.append(load_file(observations_info[i]['database'], observations_info[i]['parameter_id']))
-                  else:
-                      dataset_id = int(observations_info[i]['dataset_id'])
-                      parameter_id = int(observations_info[i]['parameter_id'])
-                      obs_dataset.append(rcmed.parameter_dataset(
-                          dataset_id,
-                          parameter_id,
-                          overlap_min_lat,
-                          overlap_max_lat,
-                          overlap_min_lon,
-                          overlap_max_lon,
-                          overlap_start_time,
-                          overlap_end_time))
-
-             screen.addstr(4, 4, "--> Data retrieved.")
-             screen.refresh()
-
-             EVAL_BOUNDS = Bounds(overlap_min_lat, overlap_max_lat, overlap_min_lon, overlap_max_lon, overlap_start_time, overlap_end_time)
-
-             screen.addstr(5, 4, "Temporally regridding...")
-             screen.refresh()
-             if temp_grid_setting.lower() == 'hourly':
-                  days = 0.5
-             elif temp_grid_setting.lower() == 'daily':
-                  days = 1
-             elif temp_grid_setting.lower() == 'monthly':
-                  days = 31
-             else:
-                  days = 365
-             for i in range(len(obs_dataset)):
-                  obs_dataset[i] = dsp.temporal_rebin(obs_dataset[i], timedelta(days))
-
-             for member, each_target_dataset in enumerate(new_model_datasets):
-                  new_model_datasets[member] = dsp.temporal_rebin(new_model_datasets[member], timedelta(days))
-                  if each_target_dataset.lats.ndim !=2 and each_target_dataset.lons.ndim !=2:
-                      new_model_datasets[member] = dsp.subset(EVAL_BOUNDS, new_model_datasets[member])
-                  else:
-                      new_model_datasets[member] = dsp.temporal_slice(EVAL_BOUNDS.start, EVAL_BOUNDS.end, each_target_dataset)
-             screen.addstr(5, 4, "--> Temporally regridded.")
-             screen.refresh()
-
-             screen.addstr(6, 4, "Spatially regridding...")
-             screen.refresh()
-             new_lats = np.arange(overlap_min_lat, overlap_max_lat, spatial_grid_setting_lat)
-             new_lons = np.arange(overlap_min_lon, overlap_max_lon, spatial_grid_setting_lon)
-             for i in range(len(obs_dataset)):
-                  obs_dataset[i] = dsp.spatial_regrid(obs_dataset[i], new_lats, new_lons)
-                  obs_dataset[i] = dsp.variable_unit_conversion(obs_dataset[i])
-
-             for member, each_target_dataset in enumerate(new_model_datasets):
-                  new_model_datasets[member] = dsp.spatial_regrid(new_model_datasets[member], new_lats, new_lons)
-                  new_model_datasets[member] = dsp.variable_unit_conversion(new_model_datasets[member])
-             screen.addstr(6, 4, "--> Spatially regridded.")
-             screen.refresh()
-
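-             # Mask the observation and model datasets together so they all share
-             # a common missing-data mask before computing metrics.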
-             obs_dataset = dsp.mask_missing_data(obs_dataset+new_model_datasets)[0:len(obs_dataset)]
-             new_model_datasets = dsp.mask_missing_data(obs_dataset+new_model_datasets)[len(obs_dataset):]
-
-             if metric == 'bias':
-                  allNames = []
-
-                  for model in new_model_datasets:
-                          allNames.append(model.name)
-
-                  screen.addstr(7, 4, "Setting up metrics...")
-                  screen.refresh()
-                  mean_bias = metrics.TemporalMeanBias()
-                  pattern_correlation = metrics.PatternCorrelation()
-                  spatial_std_dev_ratio = metrics.StdDevRatio()
-                  screen.addstr(7, 4, "--> Metrics setting done.")
-                  screen.refresh()
-
-                  screen.addstr(8, 4, "Running evaluation.....")
-                  screen.refresh()
-                  if reference_dataset[:3] == 'obs':
-                       reference = obs_dataset[int(reference_dataset[-1])]
-                  if reference_dataset[:3] == 'mod':
-                       reference = new_model_datasets[int(reference_dataset[-1])]
-
-                  targets = []
-                  for target in target_datasets:
-                       if target[:3] == 'obs':
-                            targets.append(obs_dataset[int(target[-1])])
-                       if target[:3] == 'mod':
-                            targets.append(new_model_datasets[int(target[-1])])
-
-                  evaluation_result = evaluation.Evaluation(reference, targets, [mean_bias])
-                  #export_evaluation_to_config(evaluation_result)
-                  evaluation_result.run()
-                  screen.addstr(8, 4, "--> Evaluation Finished.")
-                  screen.refresh()
-
-                  screen.addstr(9, 4, "Generating plots....")
-                  screen.refresh()
-                  new_rcm_bias = evaluation_result.results[0]
-
-                  if not os.path.exists(working_directory):
-                       os.makedirs(working_directory)
-
-                  fname = working_directory + 'Bias_contour'
-                  fname2= working_directory + 'Obs_contour'
-                  fname3= working_directory + 'Model_contour'
-                  plotter.draw_contour_map(new_rcm_bias, new_lats, new_lons, gridshape=(2, 5), fname=fname, subtitles=allNames, cmap='coolwarm_r')
-                  plotter.draw_contour_map(utils.calc_temporal_mean(reference), new_lats, new_lons, gridshape=(2, 5), fname=fname2, subtitles=allNames, cmap='coolwarm_r')
-                  plotter.draw_contour_map(utils.calc_temporal_mean(targets[0]), new_lats, new_lons, gridshape=(2, 5), fname=fname3, subtitles=allNames, cmap='coolwarm_r')
-                  screen.addstr(9, 4, "--> Plots generated.")
-                  screen.refresh()
-                  screen.addstr(y-2, 1, "Press 'enter' to Exit: ")
-                  option = screen.getstr()
-
-             if metric == 'std':
-                  for i in range(len(obs_dataset)):
-                       _, obs_dataset[i].values = utils.calc_climatology_year(obs_dataset[i])
-                       obs_dataset[i].values = np.expand_dims(obs_dataset[i].values, axis=0)
-
-                  target_datasets_ensemble = dsp.ensemble(new_model_datasets)
-                  target_datasets_ensemble.name = "ENS"
-                  new_model_datasets.append(target_datasets_ensemble)
-
-                  for member, each_target_dataset in enumerate(new_model_datasets):
-                          _, new_model_datasets[member].values = utils.calc_climatology_year(new_model_datasets[member])
-                          new_model_datasets[member].values = np.expand_dims(new_model_datasets[member].values, axis=0)
-
-                  allNames = []
-
-                  for model in new_model_datasets:
-                          allNames.append(model.name)
-                  pattern_correlation = metrics.PatternCorrelation()
-                  spatial_std_dev = metrics.StdDevRatio()
-
-                  if reference_dataset[:3] == 'obs':
-                       reference = obs_dataset[int(reference_dataset[-1])]
-                  if reference_dataset[:3] == 'mod':
-                       reference = new_model_datasets[int(reference_dataset[-1])]
-
-                  targets = []
-                  for target in target_datasets:
-                       if target[:3] == 'obs':
-                            targets.append(obs_dataset[int(target[-1])])
-                       if target[:3] == 'mod':
-                            targets.append(new_model_datasets[int(target[-1])])
-
-                  evaluation_result = evaluation.Evaluation(reference, targets, [spatial_std_dev])
-                  export_evaluation_to_config(evaluation_result)
-                  evaluation_result.run()
-
-                  rcm_std_dev = evaluation_result.results
-                  evaluation_result = evaluation.Evaluation(reference, targets, [pattern_correlation])
-                  evaluation_result.run()
-
-                  rcm_pat_cor = evaluation_result.results
-                  taylor_data = np.array([rcm_std_dev, rcm_pat_cor]).transpose()
-                  new_taylor_data = np.squeeze(np.array(taylor_data))
-
-                  if not os.path.exists(working_directory):
-                       os.makedirs(working_directory)
-
-                  fname = working_directory + 'taylor_plot'
-
-                  plotter.draw_taylor_diagram(new_taylor_data, allNames, "CRU31", fname=fname, fmt='png', frameon=False)
-        del new_model_datasets
-        del obs_dataset
-        return "No error"
-    except Exception, error:
-         return "Error: {0}".format(error[0][:200])
-
-
-##############################################################
-#     Settings Screen
-##############################################################
-
-def get_models_temp_bound():
-    '''Get models temporal bound.
-
-    :returns: lists of model start and end times
-    :rtype: (list of datetime, list of datetime)
-    '''
-
-    models_start_time = []
-    models_end_time = []
-    for model in model_datasets:
-         models_start_time.append(model.time_range()[0])
-         models_end_time.append(model.time_range()[1])
-
-    return models_start_time, models_end_time
-
-
-def get_obs_temp_bound():
-    '''Get observation temporal bound.
-
-    :returns: lists of observation start and end times
-    :rtype: (list of datetime, list of datetime)
-    '''
-
-    observations_start_time = []
-    observations_end_time = []
-    for obs in observations_info:
-         obs_start_time = datetime.strptime(obs['start_date'], "%Y-%m-%d")
-         observations_start_time.append(obs_start_time)
-         obs_end_time = datetime.strptime(obs['end_date'], "%Y-%m-%d")
-         observations_end_time.append(obs_end_time)
-
-    return observations_start_time, observations_end_time
-
-
-def get_models_temp_overlap(models_start_time, models_end_time):
-    '''Calculate temporal overlap between all the models
-
-    :param models_start_time: models start time
-    :type models_start_time: list of datetimes
-    :param models_end_time: models end time
-    :type models_end_time: list of datetime
-
-    :returns: overlap start and end time between all the models
-    :rtype: (datetime, datetime)
-    '''
-
-    models_overlap_start_time = max(models_start_time)
-    models_overlap_end_time = min(models_end_time)
-
-    #Need to check if all models have temporal overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if models_overlap_end_time <= models_overlap_start_time:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more models do not have temporal overlap with the others.")
-
-    return models_overlap_start_time, models_overlap_end_time
-
-
-def get_obs_temp_overlap(observations_start_time, observations_end_time):
-    '''Calculate temporal overlap between all the observations
-
-    :param observations_start_time: observations start time
-    :type observations_start_time: list of datetimes
-    :param observations_end_time: observations end time
-    :type observations_end_time: list of datetime
-
-    :returns: overlap start and end time between all the observations
-    :rtype: (datetime, datetime)
-    '''
-
-    obs_overlap_start_time = max(observations_start_time)
-    obs_overlap_end_time = min(observations_end_time)
-
-    #Need to check if all observations have temporal overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if obs_overlap_end_time <= obs_overlap_start_time:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more observations do not have temporal overlap with the others.")
-
-    return obs_overlap_start_time, obs_overlap_end_time
-
-
-def get_all_temp_overlap(models_overlap_start_time, models_overlap_end_time, obs_overlap_start_time, obs_overlap_end_time):
-    '''Calculate temporal overlap between given datasets.
-
-    :param models_overlap_start_time: models overlap start time
-    :type models_overlap_start_time: list of datetimes
-    :param models_overlap_end_time: models overlap end time
-    :type models_overlap_end_time: list of datetime
-    :param obs_overlap_start_time: obs overlap start time
-    :type obs_overlap_start_time: list of datetimes
-    :param obs_overlap_end_time: obs overlap end time
-    :type obs_overlap_end_time: list of datetimes
-
-    :returns: overlap start and end time between models and observations
-    :rtype: (datetime, datetime)
-    '''
-
-    all_overlap_start_time = max([models_overlap_start_time, obs_overlap_start_time])
-    all_overlap_end_time = min([models_overlap_end_time, obs_overlap_end_time])
-
-    #Need to check if all datasets have temporal overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if all_overlap_end_time <= all_overlap_start_time:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more datasets do not have temporal overlap with the others.")
-
-    return all_overlap_start_time, all_overlap_end_time
-
-
-def get_models_spatial_bound():               #TODO: convert longitudes to -180, 180 to match with observation data
-    '''Get all models spatial bound.
-
-    :returns: all models spatial boundaries
-    :rtype: list
-    '''
-
-    models_bound = []
-    for model in model_datasets:
-         models_bound.append(model.spatial_boundaries())
-
-    return models_bound
-
-
-def get_models_spatial_overlap(models_bound):
-    '''Calculate spatial overlap between all models.
-
-    :param models_bound: all models spatial boundaries information
-    :type models_bound: list
-
-    :returns: spatial boundaries overlap between all models
-    :rtype: (float, float, float, float)
-    '''
-
-    models_overlap_min_lat = max(each[0] for each in models_bound)
-    models_overlap_max_lat = min(each[1] for each in models_bound)
-    models_overlap_min_lon = max(each[2] for each in models_bound)
-    models_overlap_max_lon = min(each[3] for each in models_bound)
-
-    #Need to check if all models have spatial overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if models_overlap_max_lat <= models_overlap_min_lat or models_overlap_max_lon <= models_overlap_min_lon:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more models do not have spatial overlap with the others.")
-
-    return models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon
-
-
-def get_obs_spatial_bound():
-    '''Get all observations spatial bound.
-
-    :returns: all observations spatial boundaries
-    :rtype: list
-    '''
-
-    observations_bound = []
-    for obs in observations_info:
-         observations_bound.append([obs['min_lat'], obs['max_lat'], obs['min_lon'], obs['max_lon']])
-
-    return observations_bound
-
-
-def get_obs_spatial_overlap(observations_bound):
-    '''Calculate spatial overlap between all observations.
-
-    :param observations_bound: all observations spatial boundaries information
-    :type observations_bound: list
-
-    :returns: spatial boundaries overlap between all observations
-    :rtype: (float, float, float, float)
-    '''
-
-    obs_overlap_min_lat = max(each[0] for each in observations_bound)
-    obs_overlap_max_lat = min(each[1] for each in observations_bound)
-    obs_overlap_min_lon = max(each[2] for each in observations_bound)
-    obs_overlap_max_lon = min(each[3] for each in observations_bound)
-
-    #Need to check if all observations have spatial overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if obs_overlap_max_lat <= obs_overlap_min_lat or obs_overlap_max_lon <= obs_overlap_min_lon:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more observations do not have spatial overlap with the others.")
-
-    return obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon
-
-
-def get_all_spatial_overlap(models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon, obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon):
-    '''Calculate spatial overlap between all models and observations
-
-    :param models_overlap_min_lat: min latitude between all models
-    :type models_overlap_min_lat: float
-    :param models_overlap_max_lat: max latitude between all models
-    :type models_overlap_max_lat: float
-    :param models_overlap_min_lon: min longitude between all models
-    :type models_overlap_min_lon: float
-    :param models_overlap_max_lon: max longitude between all models
-    :type models_overlap_max_lon: float
-    :param obs_overlap_min_lat: min latitude between all observations
-    :type obs_overlap_min_lat: float
-    :param obs_overlap_max_lat: max latitude between all observations
-    :type obs_overlap_max_lat: float
-    :param obs_overlap_min_lon: min longitude between all observations
-    :type obs_overlap_min_lon: float
-    :param obs_overlap_max_lon: max longitude between all observations
-    :type obs_overlap_max_lon: float
-
-    :returns: spatial boundaries overlap between all models and observations
-    :rtype: (float, float, float, float)
-    '''
-
-    all_overlap_min_lat = max([models_overlap_min_lat, obs_overlap_min_lat])
-    all_overlap_max_lat = min([models_overlap_max_lat, obs_overlap_max_lat])
-    all_overlap_min_lon = max([models_overlap_min_lon, obs_overlap_min_lon])
-    all_overlap_max_lon = min([models_overlap_max_lon, obs_overlap_max_lon])
-
-    #Need to check if all datasets have spatial overlap, otherwise return
-    # to main menu and print a warning as notification.
-    if all_overlap_max_lat <= all_overlap_min_lat or all_overlap_max_lon <= all_overlap_min_lon:
-         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more datasets do not have spatial overlap with the others.")
-
-    return all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon
-
-
-def get_models_temp_res():
-    '''Get models temporal resolution.
-
-    :returns: coarsest temporal resolution among the models
-    :rtype: string
-    '''
-
-    models_resolution = []
-    for model in model_datasets:
-         models_resolution.append(model.temporal_resolution())
-    dic = {0:"hourly", 1:"daily", 2:"monthly", 3:"yearly"}
-    models_resolution_key = []
-    for res in models_resolution:
-         for key, value in dic.items():
-              if value == res:
-                   models_resolution_key.append(key)
-
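-    # Return the coarsest temporal resolution found among the models.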
-    return dic[max(models_resolution_key)]
-
-
-def get_obs_temp_res():
-    '''Get observations temporal resolution.
-
-    :returns: coarsest temporal resolution among the observations
-    :rtype: string
-    '''
-
-    obs_resolution = []
-    for obs in observations_info:
-         obs_resolution.append(obs['timestep'])
-    dic = {0:"hourly", 1:"daily", 2:"monthly", 3:"yearly"}
-    obs_resolution_key = []
-    for res in obs_resolution:
-         for key, value in dic.items():
-              if value == res:
-                   obs_resolution_key.append(key)
-
-    return dic[max(obs_resolution_key)]
-
-
-def get_models_spatial_res():
-    '''Get models spatial resolution
-
-    :returns: coarsest (maximum) latitude and longitude resolution among the models
-    :rtype: (float, float)
-    '''
-
-    models_lat_res = []
-    models_lon_res = []
-    for model in model_datasets:
-         models_lat_res.append(model.spatial_resolution()[0])
-         models_lon_res.append(model.spatial_resolution()[1])
-
-    return max(models_lat_res), max(models_lon_res)
-
-
-def get_obs_spatial_res():
-    '''Get observations spatial resolution
-
-    :returns: coarsest (maximum) latitude and longitude resolution among the observations
-    :rtype: (float, float)
-    '''
-
-    obs_lat_res = []
-    obs_lon_res = []
-    for obs in observations_info:
-         obs_lat_res.append(obs['lat_res'])
-         obs_lon_res.append(obs['lon_res'])
-
-    return max(obs_lat_res), max(obs_lon_res)
-
-
-def settings_screen(header):
-    '''Generates screen for settings before running evaluation.
-
-    :param header: Header of page
-    :type header: string
-    '''
-
-    note = " "
-    models_start_time, models_end_time = get_models_temp_bound()
-    models_overlap_start_time, models_overlap_end_time = get_models_temp_overlap(models_start_time, models_end_time)
-    observations_start_time, observations_end_time = get_obs_temp_bound()
-    obs_overlap_start_time, obs_overlap_end_time = get_obs_temp_overlap(observations_start_time, observations_end_time)
-    all_overlap_start_time, all_overlap_end_time = get_all_temp_overlap(models_overlap_start_time, models_overlap_end_time, obs_overlap_start_time, obs_overlap_end_time)
-    models_bound = get_models_spatial_bound()
-    models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon = get_models_spatial_overlap(models_bound)
-    observations_bound = get_obs_spatial_bound()
-    obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon = get_obs_spatial_overlap(observations_bound)
-    all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon = get_all_spatial_overlap(models_overlap_min_lat,
-                                                                                                                 models_overlap_max_lat,
-                                                                                                                 models_overlap_min_lon,
-                                                                                                                 models_overlap_max_lon,
-                                                                                                                 obs_overlap_min_lat,
-                                                                                                                 obs_overlap_max_lat,
-                                                                                                                 obs_overlap_min_lon,
-                                                                                                                 obs_overlap_max_lon)
-    model_temp_res = get_models_temp_res()
-    obs_temp_res = get_obs_temp_res()
-    model_lat_res, model_lon_res = get_models_spatial_res()
-    obs_lat_res, obs_lon_res = get_obs_spatial_res()
-
-    temp_grid_option = "Observation"
-    temp_grid_setting = obs_temp_res
-    spatial_grid_option = "Observation"
-    spatial_grid_setting_lat = obs_lat_res
-    spatial_grid_setting_lon = obs_lon_res
-    models_dict = {}
-
-    for i, model_info in enumerate(models_info):
-         models_dict['mod{0}'.format(i)] = model_info
-    obs_dict = {}
-    for i, obs_info in enumerate(observations_info):
-         obs_dict['obs{0}'.format(i)] = obs_info
-
-    reference_dataset = 'obs0'
-    target_datasets = []
-    for i in range(len(model_datasets)):
-         target_datasets.append('mod{0}'.format(i))
-    subregion_path = None
-    metrics_dict = {'1':'bias', '2':'std'}
-    metric = 'bias'
-    plots = {'bias':"contour map", 'std':"taylor diagram, bar chart(coming soon)"}
-    working_directory = os.getcwd() + "/plots/"  #Default value of working directory set to "plots" folder in current directory
-    plot_title = '' #TODO: ask user about plot title or figure out automatically
-
-    fix_min_time = all_overlap_start_time
-    fix_max_time = all_overlap_end_time
-    fix_min_lat = all_overlap_min_lat
-    fix_max_lat = all_overlap_max_lat
-    fix_min_lon = all_overlap_min_lon
-    fix_max_lon = all_overlap_max_lon
-
-    option = ''
-    while option != '0':
-         y, x = ready_screen("settings_screen", note)
-         screen.addstr(1, 1, header)
-         screen.addstr(3, 1, "INFORMATION")
-         screen.addstr(4, 1, "===========")
-         screen.addstr(6, 2, "Number of model files:  {0}".format(str(len(model_datasets))))
-         screen.addstr(7, 2, "Number of observations: {0}".format(str(len(observations_info))))
-         screen.addstr(8, 2, "Temporal Boundaries:")
-         screen.addstr(9, 5, "Start time = {0}".format(all_overlap_start_time))
-         screen.addstr(10, 5, "End time = {0}".format(all_overlap_end_time))
-         screen.addstr(11, 2, "Spatial Boundaries:")
-         screen.addstr(12, 5, "min-lat = {0}".format(all_overlap_min_lat))
-         screen.addstr(13, 5, "max-lat = {0}".format(all_overlap_max_lat))
-         screen.addstr(14, 5, "min-lon = {0}".format(all_overlap_min_lon))
-         screen.addstr(15, 5, "max-lon = {0}".format(all_overlap_max_lon))
-         screen.addstr(16, 2, "Temporal Resolution:")
-         screen.addstr(17, 5, "Model = {0}".format(model_temp_res))
-         screen.addstr(18, 5, "Observation = {0}".format(obs_temp_res))
-         screen.addstr(19, 2, "Spatial Resolution:")
-         screen.addstr(20, 5, "Model:")
-         screen.addstr(21, 10, "lat = {0}".format(model_lat_res))
-         screen.addstr(22, 10, "lon = {0}".format(model_lon_res))
-         screen.addstr(23, 5, "Observation:")
-         screen.addstr(24, 10, "lat = {0}".format(obs_lat_res))
-         screen.addstr(25, 10, "lon = {0}".format(obs_lon_res))
-         screen.addstr(26, 2, "Temporal Grid Option:  {0}".format(temp_grid_option))
-         screen.addstr(27, 2, "Spatial Grid Option:   {0}".format(spatial_grid_option))
-         screen.addstr(28, 2, "Reference Dataset: {0}".format(reference_dataset))
-         screen.addstr(29, 2, "Target Dataset/s: {0}".format([mod for mod in target_datasets]))
-         screen.addstr(30, 2, "Working Directory:")
-         screen.addstr(31, 5, "{0}".format(working_directory))
-         screen.addstr(32, 2, "Metric: {0}".format(metric))
-         screen.addstr(33, 2, "Plot: {0}".format(plots[metric]))
-
-         screen.addstr(3, x/2, "MODIFICATION and RUN")
-         screen.addstr(4, x/2, "====================")
-         screen.addstr(6, x/2, "1 - Change Temporal Boundaries")
-         screen.addstr(7, x/2, "2 - Change Spatial Boundaries")
-         screen.addstr(8, x/2, "3 - Change Temporal Gridding")
-         screen.addstr(9, x/2, "4 - Change Spatial Gridding")
-         screen.addstr(10, x/2, "5 - Change Reference dataset")
-         screen.addstr(11, x/2, "6 - Change Target dataset/s")
-         screen.addstr(12, x/2, "7 - Change Metric")
-         screen.addstr(13, x/2, "8 - Change Working Directory")
-         #screen.addstr(14, x/2, "9 - Change Plot Title [Coming Soon....]")
-         #screen.addstr(15, x/2, "10 - Save the processed data [Coming Soon....]")
-         screen.addstr(14, x/2, "9 - Show Temporal Boundaries")
-         screen.addstr(15, x/2, "10 - Show Spatial Boundaries")
-         screen.addstr(16, x/2, "0 - Return to Main Menu")
-         screen.addstr(18, x/2, "r - Run Evaluation")
-         screen.addstr(20, x/2, "Select an option: ")
-
-         screen.refresh()
-         option = screen.getstr()
-
-         if option == '1':
-              screen.addstr(25, x/2, "Enter Start Time [min time: {0}] (Format YYYY-MM-DD):".format(fix_min_time))
-              new_start_time = screen.getstr()
-              try:
-                   new_start_time = datetime.strptime(new_start_time, '%Y-%m-%d')
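-                   # Compare times as zero-padded YYYYMM integers so month ordering is correct.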
-                   new_start_time_int = int("{0}{1:02d}".format(new_start_time.year, new_start_time.month))
-                   fix_min_time_int = int("{0}{1:02d}".format(fix_min_time.year, fix_min_time.month))
-                   fix_max_time_int = int("{0}{1:02d}".format(fix_max_time.year, fix_max_time.month))
-                   all_overlap_end_time_int = int("{0}{1:02d}".format(all_overlap_end_time.year, all_overlap_end_time.month))
-                   if new_start_time_int < fix_min_time_int \
-                   or new_start_time_int > fix_max_time_int \
-                   or new_start_time_int > all_overlap_end_time_int:
-                        note = "Start time has not changed. "
-                   else:
-                        all_overlap_start_time = new_start_time
-                        note = "Start time has changed successfully. "
-              except:
-                   note = "Start time has not changed. "
-              screen.addstr(26, x/2, "Enter End Time [max time:{0}] (Format YYYY-MM-DD):".format(fix_max_time))
-              new_end_time = screen.getstr()
-              try:
-                   new_end_time = datetime.strptime(new_end_time, '%Y-%m-%d')
-                   new_end_time_int = int("{0}{1:02d}".format(new_end_time.year, new_end_time.month))
-                   fix_min_time_int = int("{0}{1:02d}".format(fix_min_time.year, fix_min_time.month))
-                   fix_max_time_int = int("{0}{1:02d}".format(fix_max_time.year, fix_max_time.month))
-                   all_overlap_start_time_int = int("{0}{1:02d}".format(all_overlap_start_time.year, all_overlap_start_time.month))
-                   if new_end_time_int > fix_max_time_int \
-                   or new_end_time_int < fix_min_time_int \
-                   or new_end_time_int < all_overlap_start_time_int:
-                        note = note + " End time has not changed. "
-                   else:
-                        all_overlap_end_time = new_end_time
-                        note = note + " End time has changed successfully. "
-              except:
-                   note = note + " End time has not changed. "
-
-         if option == '2':
-              screen.addstr(25, x/2, "Enter Minimum Latitude [{0}]:".format(fix_min_lat))
-              new_min_lat = screen.getstr()
-              try:
-                   new_min_lat = float(new_min_lat)
-                   if new_min_lat < fix_min_lat or new_min_lat > fix_max_lat or new_min_lat > all_overlap_max_lat:
-                        note = "Minimum latitude has not changed. "
-                   else:
-                        all_overlap_min_lat = new_min_lat
-                        note = "Minimum latitude has changed successfully. "
-              except:
-                   note = "Minimum latitude has not changed. "
-              screen.addstr(26, x/2, "Enter Maximum Latitude [{0}]:".format(fix_max_lat))
-              new_max_lat = screen.getstr()
-              try:
-                   new_max_lat = float(new_max_lat)
-                   if new_max_lat > fix_max_lat or new_max_lat < fix_min_lat or new_max_lat < all_overlap_min_lat:
-                        note = note + " Maximum latitude has not changed. "
-                   else:
-                        all_overlap_max_lat = new_max_lat
-                        note = note + "Maximum latitude has changed successfully. "
-              except:
-                   note = note + " Maximum latitude has not changed. "
-              screen.addstr(27, x/2, "Enter Minimum Longitude [{0}]:".format(fix_min_lon))
-              new_min_lon = screen.getstr()
-              try:
-                   new_min_lon = float(new_min_lon)
-                   if new_min_lon < fix_min_lon or new_min_lon > fix_max_lon or new_min_lon > all_overlap_max_lon:
-                        note = note + " Minimum longitude has not changed. "
-                   else:
-                        all_overlap_min_lon = new_min_lon
-                        note = note + "Minimum longitude has changed successfully. "
-              except:
-                   note = note + " Minimum longitude has not changed. "
-              screen.addstr(28, x/2, "Enter Maximum Longitude [{0}]:".format(fix_max_lon))
-              new_max_lon = screen.getstr()
-              try:
-                   new_max_lon = float(new_max_lon)
-                   if new_max_lon > fix_max_lon or new_max_lon < fix_min_lon or new_max_lon < all_overlap_min_lon:
-                        note = note + " Maximum longitude has not changed. "
-                   else:
-                        all_overlap_max_lon = new_max_lon
-                        note = note + "Maximum longitude has changed successfully. "
-              except:
-                   note = note + " Maximum longitude has not changed. "
-
-         if option == '3':
-              screen.addstr(25, x/2, "Enter Temporal Gridding Option [Model or Observation]:")
-              new_temp_grid_option = screen.getstr()
-              if new_temp_grid_option.lower() == 'model':
-                   temp_grid_option = 'Model'
-                   temp_grid_setting = model_temp_res
-                   note = "Temporal gridding option has changed successfully to {0}".format(temp_grid_option)
-              elif new_temp_grid_option.lower() == 'observation':
-                   temp_grid_option = 'Observation'
-                   temp_grid_setting = obs_temp_res
-                   note = "Temporal gridding option has changed successfully to {0}".format(temp_grid_option)
-              else:
-                   note = "Temporal gridding option has not changed."
-
-         if option == '4':
-              screen.addstr(25, x/2, "Enter Spatial Gridding Option [Model, Observation or User]:")
-              new_spatial_grid_option = screen.getstr()
-              if new_spatial_grid_option.lower() == 'model':
-                   spatial_grid_option = 'Model'
-                   spatial_grid_setting_lat = model_lat_res
-                   spatial_grid_setting_lon = model_lon_res
-                   note = "Spatial gridding option has changed successfully to {0}".format(spatial_grid_option)
-              elif new_spatial_grid_option.lower() == 'observation':
-                   spatial_grid_option = 'Observation'
-                   spatial_grid_setting_lat = obs_lat_res
-                   spatial_grid_setting_lon = obs_lon_res
-                   note = "Spatial gridding option has changed successfully to {0}".format(spatial_grid_option)
-              elif new_spatial_grid_option.lower() == 'user':
-                   screen.addstr(26, x/2, "Please enter latitude spatial resolution: ")
-                   user_lat_res = screen.getstr()
-                   screen.addstr(27, x/2, "Please enter longitude spatial resolution: ")
-                   user_lon_res = screen.getstr()
-                   try:
-                        user_lat_res = float(user_lat_res)
-                        user_lon_res = float(user_lon_res)
-                        spatial_grid_option = 'User: resolution lat:{0}, lon:{1}'.format(str(user_lat_res), str(user_lon_res))
-                        spatial_grid_setting_lat = user_lat_res
-                        spatial_grid_setting_lon = user_lon_res
-                        note = "Spatial gridding option has changed successfully to user defined."
-                   except:
-                        note = "Spatial gridding option has not changed."
-              else:
-                   note = "Spatial gridding option has not changed."
-
-         if option == '5':
-              screen.addstr(25, x/2, "Model/s:")
-              for each in enumerate(models_dict):
-                   screen.addstr(26 + each[0], x/2 + 2, "{0}: {1}".format(each[1], models_dict[each[1]]['directory'].split("/")[-1]))
-              screen.addstr(26 + len(models_dict), x/2, "Observation/s:")
-              for each in enumerate(obs_dict):
-                   screen.addstr(27 + len(models_dict) + each[0], x/2 + 2, "{0}: {1} - ({2})".format(each[1], obs_dict[each[1]]['database'], obs_dict[each[1]]['unit']))
-              screen.addstr(27 + len(obs_dict) + len(models_dict), x/2, "Please select reference dataset:")
-              selected_reference = screen.getstr()
-              if selected_reference in models_dict:
-                   reference_dataset = selected_reference
-                   note = "Reference dataset successfully changed."
-              elif selected_reference in obs_dict:
-                   reference_dataset = selected_reference
-                   note = "Reference dataset successfully changed."
-              else:
-                   note = "Reference dataset did not change."
-
-         if option == '6':
-              screen.addstr(25, x/2, "Model/s:")
-              for each in enumerate(models_dict):
-                   screen.addstr(26 + each[0], x/2 + 2, "{0}: {1}".format(each[1], models_dict[each[1]]['directory'].split("/")[-1]))
-              screen.addstr(26 + len(models_dict), x/2, "Observation/s:")
-              for each in enumerate(obs_dict):
-                   screen.addstr(27 + len(models_dict) + each[0], x/2 + 2, "{0}: {1} - ({2})".format(each[1], obs_dict[each[1]]['database'], obs_dict[each[1]]['unit']))
-              screen.addstr(27 + len(obs_dict) + len(models_dict), x/2, "Please enter target dataset/s (comma separated for multi target):")
-              selected_target = screen.getstr()
-              selected_target = selected_target.split(",")
-              if selected_target != ['']:
-                   target_datasets = []
-                   for target in selected_target:
-                        if target in models_dict:
-                             target_datasets.append(target)
-                             note = "Target dataset successfully changed."
-                        elif target in obs_dict:
-                             target_datasets.append(target)
-                             note = "Target dataset successfully changed."
-                        else:
-                             note = "Target dataset did not change."
-
-         if option == '7':
-              screen.addstr(25, x/2, "Available metrics:")
-              for i in enumerate(sorted(metrics_dict, key=metrics_dict.get)):
-                   screen.addstr(26 + i[0], x/2 + 2, "[{0}] - {1}".format(i[1], metrics_dict[i[1]]))
-              screen.addstr(26 + len(metrics_dict), x/2, "Please select a metric:")
-              metric_id = screen.getstr()
-              if metric_id in metrics_dict:
-                   metric = metrics_dict[metric_id]
-                   note = "Metric successfully changed to {0}".format(metric)
-              else:
-                   note = "Metric has not changed"
-
-         if option == '8':
-              screen.addstr(25, x/2, "Please enter working directory path:")
-              working_directory = screen.getstr()
-              if working_directory:
-                   if working_directory[-1] != '/':
-                        working_directory = working_directory + "/"
-              else:
-                   note = "Working directory has not changed"
-
-         if option == '9':
-              screen.addstr(25, x/2, "Please enter plot title:")
-              plot_title = screen.getstr()
-
-         #if option == '10':
-         #     screen.addstr(25, x/2, "Please enter plot title:")
-         #     plot_title = screen.getstr()
-
-         if option == '9':
-              models_start_time, models_end_time = get_models_temp_bound()
-              line = 25
-              for i, model in enumerate(model_datasets):
-                   mode_name = models_info[i]['directory'].split("/")[-1]
-                   line += 1
-                   screen.addstr(line, x/2, "{0}".format(mode_name))
-                   line += 1
-                   screen.addstr(line, x/2 + 3, "Start:{0} - End:{1}".format(models_start_time[i], models_end_time[i]))
-
-              observations_start_time, observations_end_time = get_obs_temp_bound()
-              for i, obs in enumerate(observations_info):
-                   line += 1
-                   screen.addstr(line, x/2, "{0}".format(observations_info[i]['database']))
-                   line += 1
-                   screen.addstr(line, x/2 + 3, "Start:{0} - End:{1}".format(observations_start_time[i], observations_end_time[i]))
-              screen.getstr()
-
-         if option == '10':
-              models_bound = get_models_spatial_bound()
-              line = 25
-              for i, model in enumerate(model_datasets):
-                   mode_name = models_info[i]['directory'].split("/")[-1]
-                   line += 1
-                   screen.addstr(line, x/2, "{0}".format(mode_name))
-                   line += 1
-                   screen.addstr(line, x/2 + 3, "{0}".format(models_bound[i]))
-
-              observations_bound = get_obs_spatial_bound()
-              for i, obs in enumerate(observations_info):
-                   line += 1
-                   screen.addstr(line, x/2, "{0}".format(observations_info[i]['database']))
-                   line += 1
-                   screen.addstr(line, x/2 + 3, "{0}".format(observations_bound[i]))
-              screen.getstr()
-
-         if option.lower() == 'r':
-              note = run_screen(model_datasets, models_info, observations_info, all_overlap_start_time, all_overlap_end_time, \
-                         all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon, \
-                         temp_grid_setting, spatial_grid_setting_lat, spatial_grid_setting_lon, reference_dataset, target_datasets, metric, working_directory, plot_title)
-
-
-##############################################################
-#     Main Menu Screen
-##############################################################
-
-def main_menu(model_datasets, models_info, observation_datasets, observations_info, note=""):
-    '''Generates the main menu page.
-
-    :param model_datasets: list of model dataset objects
-    :type model_datasets: list
-    :param models_info: list of dictionaries that contain information for each model
-    :type models_info: list
-    :param observation_datasets: list of observation dataset objects
-    :type observation_datasets: list
-    :param observations_info: list of dictionaries that contain information for each observation
-    :type observations_info: list
-    '''
-
-    option = ''
-    while option != '0':
-         ready_screen("main_menu", note)
-         model_status = "NC" if len(model_datasets) == 0 else "C"     #NC (Not Complete), if there is no model added, C (Complete) if model is added
-         obs_status = "NC" if len(observations_info) == 0 else "C"    #NC (Not Complete), if there is no observation added, C (Complete) if observation is added
-         screen.addstr(1, 1, "Main Menu:")
-         screen.addstr(4, 4, "1 - Manage Model ({0})".format(model_status))
-         screen.addstr(6, 4, "2 - Manage Observation ({0})".format(obs_status))
-         screen.addstr(8, 4, "3 - Run")
-         screen.addstr(10, 4, "0 - EXIT")
-         screen.addstr(16, 2, "Select an option: ")
-         screen.refresh()
-         option = screen.getstr()
-
-         if option == '1':
-              header = "Main Menu > Manage Model"
-              manage_model_screen(header)
-         if option == '2':
-              header = "Main Menu > Manage Observation"
-              manage_obs_screen(header)
-         if option == '3':
-              if model_status == 'NC' or obs_status == 'NC':
-                   main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: Please complete step 1 and 2 before 3.")
-              else:
-                   header = "Main Menu > Run"
-                   settings_screen(header)
-    curses.endwin()
-    sys.exit()
-
-
-if __name__ == '__main__':
-     TITLE = "RCMES CLI"
-     ORGANIZATION = "JPL/NASA - JIFRESSE/UCLA"
-     screen = curses.initscr()
-     model_datasets = []           #list of model dataset objects
-     models_info = []              #list of dictionaries that contain information for each model
-     observation_datasets = []     #list of observation dataset objects
-     observations_info = []        #list of dictionaries that contain information for each observation
-     main_menu(model_datasets, models_info, observation_datasets, observations_info)


[7/7] climate git commit: CLIMATE-720 - Revise file structure

Posted by hu...@apache.org.
CLIMATE-720 - Revise file structure

-A new folder, 'RCMES', is generated.
-Configuration files are moved into RCMES/configuration_files/
-cli_app.py is now in RCMES
-test.py is the same as examples/knmi_to_cru31_full_bias.py


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/d9e3c7e7
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/d9e3c7e7
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/d9e3c7e7

Branch: refs/heads/master
Commit: d9e3c7e73939b2b39daeb85503cb6aa6e6f31ba0
Parents: 8bc19c6 868d154
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Thu Jan 21 13:51:09 2016 -0800
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Thu Jan 21 13:51:09 2016 -0800

----------------------------------------------------------------------
 RCMES/cli_app.py                                | 1438 ++++++++++++++++++
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml |   65 +
 .../cordex-arctic_rlds_bias_to_SRB.yaml         |   65 +
 .../cordex-arctic_rlus_bias_to_SRB.yaml         |   65 +
 .../cordex-arctic_rsds_bias_to_SRB.yaml         |   65 +
 .../NARCCAP_examples/Fig10_and_Fig11.yaml       |   81 +
 .../NARCCAP_examples/Fig12_summer.yaml          |   75 +
 .../NARCCAP_examples/Fig12_winter.yaml          |   75 +
 .../NARCCAP_examples/Fig14_and_Fig15.yaml       |   82 +
 .../NARCCAP_examples/Fig16_summer.yaml          |   75 +
 .../NARCCAP_examples/Fig16_winter.yaml          |   75 +
 .../NARCCAP_examples/Fig5_and_Fig6.yaml         |   50 +
 .../NARCCAP_examples/Fig7_summer.yaml           |   75 +
 .../NARCCAP_examples/Fig7_winter.yaml           |   75 +
 .../NARCCAP_examples/Fig8_and_Fig9.yaml         |   50 +
 RCMES/metrics_and_plots.py                      |  243 +++
 RCMES/run_RCMES.py                              |  246 +++
 RCMES/statistical_downscaling/MPI_tas_JJA.yaml  |   29 +
 .../run_statistical_downscaling.py              |  231 +++
 RCMES/test/test.py                              |  179 +++
 .../NARCCAP_paper/Fig10_and_Fig11.yaml          |   81 -
 .../NARCCAP_paper/Fig12_summer.yaml             |   75 -
 .../NARCCAP_paper/Fig12_winter.yaml             |   75 -
 .../NARCCAP_paper/Fig14_and_Fig15.yaml          |   82 -
 .../NARCCAP_paper/Fig16_summer.yaml             |   75 -
 .../NARCCAP_paper/Fig16_winter.yaml             |   75 -
 .../NARCCAP_paper/Fig5_and_Fig6.yaml            |   50 -
 .../NARCCAP_paper/Fig7_summer.yaml              |   75 -
 .../NARCCAP_paper/Fig7_winter.yaml              |   75 -
 .../NARCCAP_paper/Fig8_and_Fig9.yaml            |   50 -
 ...ia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml |   45 -
 ...ordex-AF_tasmax_annual_mean_bias_to_cru.yaml |   46 -
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml |   65 -
 .../cordex-arctic_rlds_bias_to_SRB.yaml         |   65 -
 .../cordex-arctic_rlus_bias_to_SRB.yaml         |   65 -
 .../cordex-arctic_rsds_bias_to_SRB.yaml         |   65 -
 ...prec_subregion_annual_cycle_time_series.yaml |   90 --
 .../metrics_and_plots.py                        |  243 ---
 ...cap_prec_JJA_mean_taylor_diagram_to_cru.yaml |   44 -
 ...nterannual_variability_portrait_diagram.yaml |   75 -
 .../configuration_file_examples/run_RCMES.py    |  246 ---
 .../statistical_downscaling/MPI_tas_JJA.yaml    |   29 -
 .../run_statistical_downscaling.py              |  231 ---
 ocw-cli/cli_app.py                              | 1438 ------------------
 44 files changed, 3339 insertions(+), 3460 deletions(-)
----------------------------------------------------------------------
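
For orientation, the resulting layout of the new RCMES folder, reconstructed only from the fully-visible paths in the file list above and the diffs in this thread (truncated entries omitted), is roughly:

    RCMES/
        cli_app.py
        run_RCMES.py
        metrics_and_plots.py
        configuration_files/
            NARCCAP_examples/   (Fig5_and_Fig6.yaml through Fig16_winter.yaml)
        statistical_downscaling/
            MPI_tas_JJA.yaml
            run_statistical_downscaling.py
        test/
            test.py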



[3/7] climate git commit: CLIMATE-720 - Revise file structure

Posted by hu...@apache.org.
http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
new file mode 100644
index 0000000..f1f0b1e
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_DJF_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig12_winter 
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
new file mode 100644
index 0000000..5e01ce0
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
@@ -0,0 +1,82 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_monthly_1984-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: sw_sfc_dn
+  
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig14
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig15
+
+use_subregions: False
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
new file mode 100644
index 0000000..db33eff
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_JJA_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
+        variable: sw_sfc_dn
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig16_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
new file mode 100644
index 0000000..e25a4b2
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_DJF_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
+        variable: sw_sfc_dn
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig16_winter
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
new file mode 100644
index 0000000..ef7cc9c
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
@@ -0,0 +1,50 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_tas_annual_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp.*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig5
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig6
+
+use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
new file mode 100644
index 0000000..ddbce3b
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_tas_JJA_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig7_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
new file mode 100644
index 0000000..38add9b
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_tas_DJF_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig7_winter 
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
new file mode 100644
index 0000000..d25ecb6
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
@@ -0,0 +1,50 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_prec_annual_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec.*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig8
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig9
+
+use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/metrics_and_plots.py
----------------------------------------------------------------------
diff --git a/RCMES/metrics_and_plots.py b/RCMES/metrics_and_plots.py
new file mode 100644
index 0000000..6e00b0f
--- /dev/null
+++ b/RCMES/metrics_and_plots.py
@@ -0,0 +1,243 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Apache OCW lib imports
+import ocw.dataset as ds
+import ocw.data_source.local as local
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.evaluation import Evaluation
+import ocw.metrics as metrics
+
+# Python libraries
+import numpy as np
+import numpy.ma as ma
+import matplotlib.pyplot as plt
+from mpl_toolkits.basemap import Basemap 
+from matplotlib import rcParams
+from matplotlib.patches import Polygon
+import string
+
+def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name, row, column, map_projection=None):
+    '''Draw maps of observed multi-year climatology and biases of models'''
+
+    # calculate climatology of observation data
+    obs_clim = utils.calc_temporal_mean(obs_dataset)
+    # determine the metrics
+    map_of_bias = metrics.TemporalMeanBias()
+
+    # create the Evaluation object
+    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
+                                 model_datasets, # list of target datasets for the evaluation
+                                 [map_of_bias, map_of_bias])
+    # run the evaluation (bias calculation)
+    bias_evaluation.run() 
+
+    rcm_bias = bias_evaluation.results[0]
+
+    fig = plt.figure()
+
+    lat_min = obs_dataset.lats.min()
+    lat_max = obs_dataset.lats.max()
+    lon_min = obs_dataset.lons.min()
+    lon_max = obs_dataset.lons.max()
+
+    string_list = list(string.ascii_lowercase) 
+    ax = fig.add_subplot(row,column,1)
+    if map_projection == 'npstere':
+        m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
+            resolution = 'l', fix_aspect=False)
+    else:
+        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
+
+    x,y = m(lons, lats)
+
+    m.drawcoastlines(linewidth=1)
+    m.drawcountries(linewidth=1)
+    m.drawstates(linewidth=0.5, color='w')
+    max = m.contourf(x,y,obs_clim,levels = plotter._nice_intervals(obs_dataset.values, 10), extend='both',cmap='rainbow')
+    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
+    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
+    plt.colorbar(max, cax = cax) 
+    clevs = plotter._nice_intervals(rcm_bias, 11)
+    for imodel in np.arange(len(model_datasets)):
+
+        ax = fig.add_subplot(row, column,2+imodel)
+        if map_projection == 'npstere':
+            m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
+                resolution = 'l', fix_aspect=False)
+        else:
+            m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
+                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
+        m.drawcoastlines(linewidth=1)
+        m.drawcountries(linewidth=1)
+        m.drawstates(linewidth=0.5, color='w')
+        max = m.contourf(x,y,rcm_bias[imodel,:],levels = clevs, extend='both', cmap='RdBu_r')
+        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
+
+    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
+    plt.colorbar(max, cax = cax) 
+
+    plt.subplots_adjust(hspace=0.01,wspace=0.05)
+
+    fig.savefig(file_name,dpi=600,bbox_inches='tight')
+
+def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
+                                      file_name):
+
+    # calculate climatological mean fields
+    obs_clim_dataset = ds.Dataset(obs_dataset.lats, obs_dataset.lons, obs_dataset.times, utils.calc_temporal_mean(obs_dataset))
+    model_clim_datasets = []
+    for dataset in model_datasets:
+        model_clim_datasets.append(ds.Dataset(dataset.lats, dataset.lons, dataset.times, utils.calc_temporal_mean(dataset)))
+
+    # Metrics (spatial standard deviation and pattern correlation)
+    # determine the metrics
+    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
+
+    # create the Evaluation object
+    taylor_evaluation = Evaluation(obs_clim_dataset, # Climatological mean of reference dataset for the evaluation
+                                 model_clim_datasets, # list of climatological means from model datasets for the evaluation
+                                 [taylor_diagram])
+
+    # run the evaluation (bias calculation)
+    taylor_evaluation.run() 
+
+    taylor_data = taylor_evaluation.results[0]
+
+    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right',frameon=False)
+
+def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle, 
+                          file_name, row, column, x_tick=['']):
+
+    nmodel, nt, nregion = model_subregion_mean.shape  
+
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+        
+    x_axis = np.arange(nt)
+    x_tick_values = x_axis
+
+    fig = plt.figure()
+    rcParams['xtick.labelsize'] = 6
+    rcParams['ytick.labelsize'] = 6
+  
+    for iregion in np.arange(nregion):
+        ax = fig.add_subplot(row, column, iregion+1) 
+        x_tick_labels = ['']
+        if iregion+1  > column*(row-1):
+            x_tick_labels = x_tick 
+        else:
+            x_tick_labels=['']
+        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
+        for imodel in np.arange(nmodel):
+            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
+        ax.set_xlim([-0.5,nt-0.5])
+        ax.set_xticks(x_tick_values)
+        ax.set_xticklabels(x_tick_labels)
+        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
+    
+    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center' , prop={'size':7}, frameon=False)  
+
+    fig.subplots_adjust(hspace=0.7, wspace=0.5)
+    fig.savefig(file_name, dpi=600, bbox_inches='tight')
+
+def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
+                               file_name, normalize=True):
+
+    nmodel, nt, nregion = model_subregion_mean.shape
+    
+    if seasonal_cycle:
+        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
+        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
+        nt = 12
+    else:
+        obs_data = obs_subregion_mean
+        model_data = model_subregion_mean
+
+    subregion_metrics = ma.zeros([4, nregion, nmodel])
+
+    for imodel in np.arange(nmodel):
+        for iregion in np.arange(nregion):
+            # First metric: bias
+            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time = True)
+            # Second metric: standard deviation
+            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Third metric: RMSE
+            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+            # Fourth metric: correlation
+            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
+   
+    if normalize:
+        for iregion in np.arange(nregion):
+            subregion_metrics[0, iregion, : ] = subregion_metrics[0, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
+            subregion_metrics[1, iregion, : ] = subregion_metrics[1, iregion, : ]*100. 
+            subregion_metrics[2, iregion, : ] = subregion_metrics[2, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
+
+    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
+
+    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
+        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric, 
+                                      xlabel='model',ylabel='region')             
+
+def Map_plot_subregion(subregions, ref_dataset, directory):
+  
+    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
+    fig = plt.figure()
+    ax = fig.add_subplot(111)
+    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
+                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
+    m.drawcoastlines(linewidth=0.75)
+    m.drawcountries(linewidth=0.75)
+    m.etopo()  
+    x, y = m(lons, lats) 
+    #subregion_array = ma.masked_equal(subregion_array, 0)
+    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
+    for subregion in subregions:
+        draw_screen_poly(subregion[1], m, 'w') 
+        plt.annotate(subregion[0],xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center',va='center', fontsize=8) 
+    fig.savefig(directory+'map_subregion', bbox_inches='tight')
+
+def draw_screen_poly(boundary_array, m, linecolor='k'):
+
+    ''' Draw a polygon on a map
+
+    :param boundary_array: subregion boundaries [lat_south, lat_north, lon_west, lon_east]
+    :param m: Basemap object
+    '''
+
+    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
+    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
+    x, y = m( lons, lats )
+    xy = zip(x,y)
+    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
+    plt.gca().add_patch(poly)
+    
+    
+   
+
+    
+
+    
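
As a quick way to exercise the plotting helpers added above, the sketch below (not part of this commit) drives Portrait_diagram_subregion with synthetic masked arrays in place of the subregion means that run_RCMES.py normally obtains from utils.calc_subregion_area_mean_and_std. It assumes an environment with OCW and Basemap installed; the array shapes ([1, nt, nregion] for the observation, [nmodel, nt, nregion] for the models), the dataset names, and the output prefix are illustrative only.

import numpy as np
import numpy.ma as ma
from metrics_and_plots import Portrait_diagram_subregion

# 3 models, 24 yearly values, 5 subregions; random data only exercises the code path
nmodel, nt, nregion = 3, 24, 5
obs_subregion_mean = ma.masked_array(np.random.randn(1, nt, nregion))
model_subregion_mean = ma.masked_array(np.random.randn(nmodel, nt, nregion))

# seasonal_cycle=False keeps the series as-is (no reshape into a 12-month cycle);
# one portrait diagram per metric (bias, std, RMSE, corr) is written to
# ./portrait_example_<metric>
Portrait_diagram_subregion(obs_subregion_mean, 'OBS',
                           model_subregion_mean, ['M1', 'M2', 'M3'],
                           False, './portrait_example', normalize=True)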

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/run_RCMES.py
----------------------------------------------------------------------
diff --git a/RCMES/run_RCMES.py b/RCMES/run_RCMES.py
new file mode 100644
index 0000000..1054446
--- /dev/null
+++ b/RCMES/run_RCMES.py
@@ -0,0 +1,246 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Apache OCW lib imports
+import ocw.dataset_processor as dsp
+import ocw.data_source.local as local
+import ocw.data_source.rcmed as rcmed
+import ocw.plotter as plotter
+import ocw.utils as utils
+from ocw.dataset import Bounds
+
+import matplotlib.pyplot as plt
+from matplotlib import rcParams
+import numpy as np
+import numpy.ma as ma
+import yaml
+from glob import glob
+import operator
+from dateutil import parser
+from datetime import datetime
+import os
+import sys
+
+from metrics_and_plots import *
+
+import ssl
+if hasattr(ssl, '_create_unverified_context'):
+  ssl._create_default_https_context = ssl._create_unverified_context
+
+config_file = str(sys.argv[1])
+
+print 'Reading the configuration file ', config_file
+config = yaml.load(open(config_file))
+time_info = config['time']
+temporal_resolution = time_info['temporal_resolution']
+
+start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
+end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
+
+space_info = config['space']
+min_lat = space_info['min_lat']
+max_lat = space_info['max_lat']
+min_lon = space_info['min_lon']
+max_lon = space_info['max_lon']
+
+""" Step 1: Load the reference data """
+ref_data_info = config['datasets']['reference']
+print 'Loading observation dataset:\n',ref_data_info
+ref_name = ref_data_info['data_name']
+if ref_data_info['data_source'] == 'local':
+    ref_dataset = local.load_file(ref_data_info['path'],
+                                  ref_data_info['variable'], name=ref_name)
+elif ref_data_info['data_source'] == 'rcmed':
+    ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
+                                          ref_data_info['parameter_id'],
+                                          min_lat, max_lat, min_lon, max_lon,
+                                          start_time, end_time)
+else:
+    print ' '
+    # TO DO: support ESGF
+
+ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
+if 'multiplying_factor' in ref_data_info.keys():
+    ref_dataset.values = ref_dataset.values*ref_data_info['multiplying_factor']
+
+""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
+model_data_info = config['datasets']['targets']
+print 'Loading model datasets:\n',model_data_info
+if model_data_info['data_source'] == 'local':
+    model_datasets, model_names = local.load_multiple_files(file_path = model_data_info['path'],
+                                                            variable_name =model_data_info['variable'])
+else:
+    print ' '
+    # TO DO: support RCMED and ESGF
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
+
+""" Step 3: Subset the data for temporal and spatial domain """
+# Create a Bounds object to use for subsetting
+if time_info['maximum_overlap_period']:
+    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
+    print 'Maximum overlap period'
+    print 'start_time:', start_time
+    print 'end_time:', end_time
+
+if temporal_resolution == 'monthly' and end_time.day !=1:
+    end_time = end_time.replace(day=1)
+if ref_data_info['data_source'] == 'rcmed':
+    min_lat = np.max([min_lat, ref_dataset.lats.min()])
+    max_lat = np.min([max_lat, ref_dataset.lats.max()])
+    min_lon = np.max([min_lon, ref_dataset.lons.min()])
+    max_lon = np.min([max_lon, ref_dataset.lons.max()])
+bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
+
+if ref_dataset.lats.ndim !=2 and ref_dataset.lons.ndim !=2:
+    ref_dataset = dsp.subset(bounds,ref_dataset)
+else:
+    ref_dataset = dsp.temporal_slice(bounds.start, bounds.end, ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    if dataset.lats.ndim !=2 and dataset.lons.ndim !=2:
+        model_datasets[idata] = dsp.subset(bounds,dataset)
+    else:
+        model_datasets[idata] = dsp.temporal_slice(bounds.start, bounds.end, dataset)
+
+# Temporally subset both observation and model datasets for the user-specified season
+month_start = time_info['month_start']
+month_end = time_info['month_end']
+average_each_year = time_info['average_each_year']
+
+ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
+
+# generate grid points for regridding
+if config['regrid']['regrid_on_reference']:
+    new_lat = ref_dataset.lats
+    new_lon = ref_dataset.lons 
+else:
+    delta_lat = config['regrid']['regrid_dlat']
+    delta_lon = config['regrid']['regrid_dlon']
+    nlat = (max_lat - min_lat)/delta_lat+1
+    nlon = (max_lon - min_lon)/delta_lon+1
+    new_lat = np.linspace(min_lat, max_lat, nlat)
+    new_lon = np.linspace(min_lon, max_lon, nlon)
+
+# number of models
+nmodel = len(model_datasets)
+print 'Dataset loading completed'
+print 'Observation data:', ref_name 
+print 'Number of model datasets:',nmodel
+for model_name in model_names:
+    print model_name
+
+""" Step 4: Spatial regriding of the reference datasets """
+print 'Regridding datasets: ', config['regrid']
+if not config['regrid']['regrid_on_reference']:
+    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
+    print 'Reference dataset has been regridded'
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
+    print model_names[idata]+' has been regridded'
+
+print 'Propagating missing data information'
+ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0]
+model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:]
+
+""" Step 5: Checking and converting variable units """
+print 'Checking and converting variable units'
+ref_dataset = dsp.variable_unit_conversion(ref_dataset)
+for idata,dataset in enumerate(model_datasets):
+    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
+    
+
+print 'Generating multi-model ensemble'
+if len(model_datasets) >= 2.:
+    model_datasets.append(dsp.ensemble(model_datasets))
+    model_names.append('ENS')
+
+""" Step 6: Generate subregion average and standard deviation """
+if config['use_subregions']:
+    # sort the subregion by region names and make a list
+    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
+
+    # number of subregions
+    nsubregion = len(subregions)
+
+    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
+
+    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions) 
+    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions) 
+
+""" Step 7: Write a netCDF file """
+workdir = config['workdir']
+if workdir[-1] != '/':
+    workdir = workdir+'/'
+print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
+if not os.path.exists(workdir):
+    os.system("mkdir "+workdir)
+
+if config['use_subregions']:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'],
+                                                       subregions=subregions, subregion_array = subregion_array, 
+                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
+                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
+else:
+    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
+                                                       path=workdir+config['output_netcdf_filename'])
+
+""" Step 8: Calculate metrics and draw plots """
+nmetrics = config['number_of_metrics_and_plots']
+if config['use_subregions']:
+    Map_plot_subregion(subregions, ref_dataset, workdir)
+
+if nmetrics > 0:
+    print 'Calculating metrics and generating plots'
+    for imetric in np.arange(nmetrics)+1:
+        metrics_name = config['metrics'+'%1d' %imetric]
+        plot_info = config['plots'+'%1d' %imetric]
+        file_name = workdir+plot_info['file_name']
+
+        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
+        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
+            row, column = plot_info['subplots_array']
+            if 'map_projection' in plot_info.keys():
+                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                          file_name, row, column, map_projection=plot_info['map_projection'])
+            else:
+                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                          file_name, row, column)
+        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
+            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
+                                      file_name)
+        elif config['use_subregions']:
+            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
+            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                row, column = plot_info['subplots_array']
+                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
+            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
+                                      file_name)
+            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
+                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
+                                      file_name)
+        else:
+            print metrics_name+' is not supported with the current configuration. Please check the list of supported metrics.'
+
+
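
For reference, the metrics stage above is driven entirely by numbered keys in the evaluation configuration: 'number_of_metrics_and_plots' gives the count, and each metric i is described by a 'metrics1', 'metrics2', ... name plus a matching 'plots1', 'plots2', ... block. A minimal, illustrative fragment (the metric name is one the loop above recognizes; the file name and subplot layout are placeholder values, not taken from the repository):

    number_of_metrics_and_plots: 1

    metrics1: Map_plot_bias_of_multiyear_climatology

    plots1:
        file_name: example_bias_map
        subplots_array: !!python/tuple [2,2]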

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/statistical_downscaling/MPI_tas_JJA.yaml
----------------------------------------------------------------------
diff --git a/RCMES/statistical_downscaling/MPI_tas_JJA.yaml b/RCMES/statistical_downscaling/MPI_tas_JJA.yaml
new file mode 100644
index 0000000..17a12a7
--- /dev/null
+++ b/RCMES/statistical_downscaling/MPI_tas_JJA.yaml
@@ -0,0 +1,29 @@
+case_name: MPI_tas_JJA
+
+# downscaling method (1: delta addition, 2: delta correction, 3: quantile mapping, 4: asynchronous regression)
+downscaling_option: 4
+
+# longitude (-180 ~ 180) and latitude (-90 ~ 90) of the grid point to downscale model output [in degrees]
+location:
+    name: HoChiMinh_City 
+    grid_lat: 10.75    
+    grid_lon: 106.67   
+
+# Season, given as a tuple of month numbers (December - February: [12,1,2]; June - August: [6,7,8])
+month_index: !!python/tuple [6,7,8]
+
+# reference (observation) data
+reference:
+    data_source: local
+    data_name: CRU
+    path: ./data/observation/tas_cru_monthly_1981-2010.nc
+    variable: tas
+
+model:
+    data_name: MPI
+    variable: tas
+    present:
+        path: ./data/model_present/tas_Amon_MPI_decadal1980_198101-201012.nc
+    future:
+        scenario_name: RCP8.5_2041-70
+        path: ./data/model_rcp85/tas_Amon_MPI_rcp85_204101-207012.nc 
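
Note that month_index above is a Python tuple of month numbers; the driver script that follows uses only its first and last entries when subsetting (month_start = month_index[0], month_end = month_index[-1]) and writes each listed month into the output spreadsheet. Following the comment in the file, a winter (DJF) variant of this configuration would use, for example:

    month_index: !!python/tuple [12,1,2]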

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/statistical_downscaling/run_statistical_downscaling.py
----------------------------------------------------------------------
diff --git a/RCMES/statistical_downscaling/run_statistical_downscaling.py b/RCMES/statistical_downscaling/run_statistical_downscaling.py
new file mode 100644
index 0000000..60c6ac2
--- /dev/null
+++ b/RCMES/statistical_downscaling/run_statistical_downscaling.py
@@ -0,0 +1,231 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import yaml
+import os
+import sys
+import xlwt
+
+import numpy as np
+import numpy.ma as ma
+
+import ocw.data_source.local as local
+import ocw.dataset as ds
+import ocw.dataset_processor as dsp
+import ocw.statistical_downscaling as down
+import ocw.plotter as plotter
+
+import ssl
+
+def spatial_aggregation(target_dataset, lon_min, lon_max, lat_min, lat_max):
+    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
+    :param target_dataset: Dataset object that needs spatial subsetting
+    :type target_dataset: Open Climate Workbench Dataset Object
+    :param lon_min: minimum longitude (western boundary)
+    :type lon_min: float
+    :param lon_max: maximum longitude (eastern boundary)
+    :type lon_min: float
+    :param lat_min: minimum latitude (southern boundary) 
+    :type lat_min: float
+    :param lat_min: maximum latitude (northern boundary) 
+    :type lat_min: float
+    :returns: A new spatially subset Dataset
+    :rtype: Open Climate Workbench Dataset Object
+    """
+
+    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
+        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
+    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
+        new_lon = target_dataset.lons
+        new_lat = target_dataset.lats
+ 
+    y_index, x_index = np.where((new_lon >= lon_min) & (new_lon <= lon_max) & (new_lat >= lat_min) & (new_lat <= lat_max))[0:2]
+
+    #new_dataset = ds.Dataset(target_dataset.lats[y_index.min():y_index.max()+1],
+    #                         target_dataset.lons[x_index.min():x_index.max()+1],
+    #                         target_dataset.times,
+    #                         target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1],
+    #                         target_dataset.variable,
+    #                         target_dataset.name) 
+    return target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1]
+
+def extract_data_at_nearest_grid_point(target_dataset, longitude, latitude):
+    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
+    :param target_dataset: Dataset object that needs spatial subsetting
+    :type target_dataset: Open Climate Workbench Dataset Object
+    :type longitude: float
+    :param longitude: longitude
+    :type latitude: float
+    :param latitude: latitude 
+    :returns: A new spatially subset Dataset
+    :rtype: Open Climate Workbench Dataset Object
+    """
+
+    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
+        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
+    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
+        new_lon = target_dataset.lons
+        new_lat = target_dataset.lats
+    distance = (new_lon - longitude)**2. + (new_lat - latitude)**2.
+    y_index, x_index = np.where(distance == np.min(distance))[0:2]
+
+    return target_dataset.values[:,y_index[0], x_index[0]]
+
+if hasattr(ssl, '_create_unverified_context'):
+  ssl._create_default_https_context = ssl._create_unverified_context
+
+config_file = str(sys.argv[1])
+
+print 'Reading the configuration file ', config_file
+
+config = yaml.load(open(config_file))
+
+case_name = config['case_name']
+
+downscale_option_names = [' ','delta_addition','delta_correction','quantile_mapping','asynchronous_regression']
+DOWNSCALE_OPTION = config['downscaling_option']
+
+location = config['location']
+grid_lat = location['grid_lat']
+grid_lon = location['grid_lon']
+
+month_index = config['month_index']
+month_start = month_index[0]
+month_end = month_index[-1]    
+
+ref_info = config['reference']
+model_info = config['model']
+
+# Filename for the output data/plot (without file extension)
+OUTPUT = "%s_%s_%s_%s_%s" %(location['name'], ref_info['variable'], model_info['data_name'], ref_info['data_name'],model_info['future']['scenario_name'])
+
+print("Processing "+ ref_info['data_name'] + "  data")
+""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
+
+print("Loading %s into an OCW Dataset Object" % (ref_info['path'],))
+ref_dataset = local.load_file(ref_info['path'], ref_info['variable'])
+print(ref_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (ref_dataset.values.shape,))
+
+print("Loading %s into an OCW Dataset Object" % (model_info['present']['path'],))
+model_dataset_present = local.load_file(model_info['present']['path'], model_info['variable'])
+print(model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_present.values.shape,))
+dy = model_dataset_present.spatial_resolution()[0]
+dx = model_dataset_present.spatial_resolution()[1]
+
+model_dataset_future = local.load_file(model_info['future']['path'], model_info['variable'])
+print(model_info['future']['scenario_name']+':'+model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_future.values.shape,))
+
+""" Step 2: Temporal subsetting """
+print("Temporal subsetting for the selected month(s)")
+ref_temporal_subset = dsp.temporal_subset(month_start, month_end, ref_dataset)
+model_temporal_subset_present = dsp.temporal_subset(month_start, month_end, model_dataset_present)
+model_temporal_subset_future = dsp.temporal_subset(month_start, month_end, model_dataset_future)
+
+""" Step 3: Spatial aggregation of observational data into the model grid """
+print("Spatial aggregation of observational data near latitude %0.2f and longitude %0.2f " % (grid_lat, grid_lon))
+# There are two options to aggregate observational data near a model grid point
+#ref_subset = spatial_aggregation(ref_temporal_subset, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
+#model_subset_present = spatial_aggregation(model_temporal_subset_present, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
+#model_subset_future = spatial_aggregation(model_temporal_subset_future, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
+ref_subset = extract_data_at_nearest_grid_point(ref_temporal_subset, grid_lon, grid_lat)
+model_subset_present = extract_data_at_nearest_grid_point(model_temporal_subset_present, grid_lon, grid_lat)
+model_subset_future = extract_data_at_nearest_grid_point(model_temporal_subset_future, grid_lon, grid_lat)
+
+
+""" Step 4:  Create a statistical downscaling object and downscaling model output """
+# You can add other methods
+print("Creating a statistical downscaling object")
+
+downscale = down.Downscaling(ref_subset, model_subset_present, model_subset_future)
+
+print(downscale_option_names[DOWNSCALE_OPTION]+": Downscaling model output")
+
+if DOWNSCALE_OPTION == 1:
+    downscaled_model_present, downscaled_model_future = downscale.Delta_addition()
+elif DOWNSCALE_OPTION == 2:
+    downscaled_model_present, downscaled_model_future = downscale.Delta_correction()
+elif DOWNSCALE_OPTION == 3:
+    downscaled_model_present, downscaled_model_future = downscale.Quantile_mapping()
+elif DOWNSCALE_OPTION == 4:
+    downscaled_model_present, downscaled_model_future = downscale.Asynchronous_regression()
+else:
+    sys.exit("DOWNSCALE_OPTION must be an integer between 1 and 4")
+
+
+""" Step 5: Create plots and spreadsheet """
+print("Plotting results")
+if not os.path.exists(case_name):
+    os.system("mkdir "+case_name)
+os.chdir(os.getcwd()+"/"+case_name)
+
+plotter.draw_marker_on_map(grid_lat, grid_lon, fname='downscaling_location', location_name=config['location']['name'])
+
+plotter.draw_histogram([ref_subset.ravel(), model_subset_present.ravel(), model_subset_future.ravel()], 
+                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
+                       fname=OUTPUT+'_original')
+                        
+plotter.draw_histogram([ref_subset.ravel(), downscaled_model_present, downscaled_model_future], 
+                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
+                       fname=OUTPUT+'_downscaled_using_'+downscale_option_names[DOWNSCALE_OPTION])
+
+print("Generating spreadsheet")
+
+workbook = xlwt.Workbook()
+sheet = workbook.add_sheet(downscale_option_names[config['downscaling_option']])
+
+sheet.write(0, 0, config['location']['name'])
+sheet.write(0, 2, 'longitude')
+sheet.write(0, 4, 'latitude')
+sheet.write(0, 6, 'month')
+
+
+sheet.write(0, 3, grid_lon)
+sheet.write(0, 5, grid_lat)
+
+
+
+for imonth,month in enumerate(month_index):
+    sheet.write(0, 7+imonth, month)
+
+sheet.write(3, 1, 'observation')
+sheet.write(4, 1, ref_info['data_name'])
+for idata, data in enumerate(ref_subset.ravel()[~ref_subset.ravel().mask]):
+    sheet.write(5+idata,1,data.item())
+
+sheet.write(3, 2, 'original')
+sheet.write(4, 2, model_info['data_name'])
+for idata, data in enumerate(model_subset_present.ravel()):
+    sheet.write(5+idata,2,data.item())
+
+sheet.write(3, 3, 'original')
+sheet.write(4, 3, model_info['future']['scenario_name'])
+for idata, data in enumerate(model_subset_future.ravel()):
+    sheet.write(5+idata,3,data.item())
+
+sheet.write(3, 4, 'downscaled')
+sheet.write(4, 4, model_info['data_name'])
+for idata, data in enumerate(downscaled_model_present):
+    sheet.write(5+idata,4,data.item())
+
+sheet.write(3, 5, 'downscaled')
+sheet.write(4, 5, model_info['future']['scenario_name'])
+for idata, data in enumerate(downscaled_model_future):
+    sheet.write(5+idata,5,data.item())
+
+workbook.save(OUTPUT+'.xls')
+
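
The script above takes the path to its YAML configuration as the single command-line argument (config_file = str(sys.argv[1])). Step 4 selects one of the four Downscaling methods from the integer downscaling_option; an equivalent, table-driven sketch of that dispatch is shown here as an illustration only (it is not the committed code, and it assumes downscale and DOWNSCALE_OPTION are defined exactly as in the script):

    # Sketch only: table-driven form of the Step 4 if/elif chain above.
    DOWNSCALE_METHODS = {1: 'Delta_addition',
                         2: 'Delta_correction',
                         3: 'Quantile_mapping',
                         4: 'Asynchronous_regression'}
    if DOWNSCALE_OPTION not in DOWNSCALE_METHODS:
        sys.exit("DOWNSCALE_OPTION must be an integer between 1 and 4")
    downscaled_model_present, downscaled_model_future = \
        getattr(downscale, DOWNSCALE_METHODS[DOWNSCALE_OPTION])()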

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/test/test.py
----------------------------------------------------------------------
diff --git a/RCMES/test/test.py b/RCMES/test/test.py
new file mode 100644
index 0000000..beab16f
--- /dev/null
+++ b/RCMES/test/test.py
@@ -0,0 +1,179 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import urllib
+from os import path
+
+import numpy as np
+
+import ocw.data_source.local as local
+import ocw.data_source.rcmed as rcmed
+from ocw.dataset import Bounds as Bounds
+import ocw.dataset_processor as dsp
+import ocw.evaluation as evaluation
+import ocw.metrics as metrics
+import ocw.plotter as plotter
+import ssl
+
+if hasattr(ssl, '_create_unverified_context'):
+  ssl._create_default_https_context = ssl._create_unverified_context
+
+# File URL leader
+FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
+# This way we can easily adjust the time span of the retrievals
+YEARS = 3
+# Local model file
+MODEL = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc"
+# Filename for the output image/plot (without file extension)
+OUTPUT_PLOT = "cru_31_tmax_knmi_africa_bias_full"
+
+# Download the necessary NetCDF file if it is not already present
+if not path.exists(MODEL):
+    urllib.urlretrieve(FILE_LEADER + MODEL, MODEL)
+
+""" Step 1: Load Local NetCDF File into OCW Dataset Objects """
+print("Loading %s into an OCW Dataset Object" % (MODEL,))
+knmi_dataset = local.load_file(MODEL, "tasmax")
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
+
+""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
+print("Working with the rcmed interface to get CRU3.1 Daily-Max Temp")
+metadata = rcmed.get_parameters_metadata()
+
+cru_31 = [m for m in metadata if m['parameter_id'] == "39"][0]
+
+""" The RCMED API uses the following function to query, subset and return the 
+raw data from the database:
+
+rcmed.parameter_dataset(dataset_id, parameter_id, min_lat, max_lat, min_lon, 
+                        max_lon, start_time, end_time)
+
+The first two required params are in the cru_31 variable we defined earlier
+"""
+# Must cast to int since the rcmed api requires ints
+dataset_id = int(cru_31['dataset_id'])
+parameter_id = int(cru_31['parameter_id'])
+
+print("We are going to use the Model to constrain the Spatial Domain")
+#  The spatial_boundaries() function returns the spatial extent of the dataset
+print("The KNMI_Dataset spatial bounds (min_lat, max_lat, min_lon, max_lon) are: \n"
+      "%s\n" % (knmi_dataset.spatial_boundaries(), ))
+print("The KNMI_Dataset spatial resolution (lat_resolution, lon_resolution) is: \n"
+      "%s\n\n" % (knmi_dataset.spatial_resolution(), ))
+min_lat, max_lat, min_lon, max_lon = knmi_dataset.spatial_boundaries()
+
+print("Calculating the Maximum Overlap in Time for the datasets")
+
+cru_start = datetime.datetime.strptime(cru_31['start_date'], "%Y-%m-%d")
+cru_end = datetime.datetime.strptime(cru_31['end_date'], "%Y-%m-%d")
+knmi_start, knmi_end = knmi_dataset.time_range()
+# Grab the Max Start Time
+start_time = max([cru_start, knmi_start])
+# Grab the Min End Time
+end_time = min([cru_end, knmi_end])
+print("Overlap computed to be: %s to %s" % (start_time.strftime("%Y-%m-%d"),
+                                          end_time.strftime("%Y-%m-%d")))
+print("We are going to grab the first %s year(s) of data" % YEARS)
+end_time = datetime.datetime(start_time.year + YEARS, start_time.month, start_time.day)
+print("Final Overlap is: %s to %s" % (start_time.strftime("%Y-%m-%d"),
+                                          end_time.strftime("%Y-%m-%d")))
+
+print("Fetching data from RCMED...")
+cru31_dataset = rcmed.parameter_dataset(dataset_id,
+                                        parameter_id,
+                                        min_lat,
+                                        max_lat,
+                                        min_lon,
+                                        max_lon,
+                                        start_time,
+                                        end_time)
+
+""" Step 3: Resample Datasets so they are the same shape """
+print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s" % (knmi_dataset.values.shape,))
+print("Our two datasets have a mis-match in time. We will subset on time to %s years\n" % YEARS)
+
+# Create a Bounds object to use for subsetting
+new_bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
+knmi_dataset = dsp.subset(new_bounds, knmi_dataset)
+
+print("CRU31_Dataset.values shape: (times, lats, lons) - %s" % (cru31_dataset.values.shape,))
+print("KNMI_Dataset.values shape: (times, lats, lons) - %s \n" % (knmi_dataset.values.shape,))
+
+print("Temporally Rebinning the Datasets to a Single Timestep")
+# To run FULL temporal Rebinning use a timedelta > 366 days.  I used 999 in this example
+knmi_dataset = dsp.temporal_rebin(knmi_dataset, datetime.timedelta(days=999))
+cru31_dataset = dsp.temporal_rebin(cru31_dataset, datetime.timedelta(days=999))
+
+print("KNMI_Dataset.values shape: %s" % (knmi_dataset.values.shape,))
+print("CRU31_Dataset.values shape: %s \n\n" % (cru31_dataset.values.shape,))
+ 
+""" Spatially Regrid the Dataset Objects to a 1/2 degree grid """
+# Using the bounds we will create a new set of lats and lons on 0.5 degree step
+new_lons = np.arange(min_lon, max_lon, 0.5)
+new_lats = np.arange(min_lat, max_lat, 0.5)
+ 
+# Spatially regrid datasets using the new_lats, new_lons numpy arrays
+print("Spatially Regridding the KNMI_Dataset...")
+knmi_dataset = dsp.spatial_regrid(knmi_dataset, new_lats, new_lons)
+print("Spatially Regridding the CRU31_Dataset...")
+cru31_dataset = dsp.spatial_regrid(cru31_dataset, new_lats, new_lons)
+print("Final shape of the KNMI_Dataset:%s" % (knmi_dataset.values.shape, ))
+print("Final shape of the CRU31_Dataset:%s" % (cru31_dataset.values.shape, ))
+ 
+""" Step 4:  Build a Metric to use for Evaluation - Bias for this example """
+# You can build your own metrics, but OCW also ships with some common metrics
+print("Setting up a Bias metric to use for evaluation")
+bias = metrics.Bias()
+
+""" Step 5: Create an Evaluation Object using Datasets and our Metric """
+# The Evaluation Class Signature is:
+# Evaluation(reference, targets, metrics, subregions=None)
+# Evaluation can take in multiple targets and metrics, so we need to convert
+# our examples into Python lists.  Evaluation will iterate over the lists
+print("Making the Evaluation definition")
+bias_evaluation = evaluation.Evaluation(knmi_dataset, [cru31_dataset], [bias])
+print("Executing the Evaluation using the object's run() method")
+bias_evaluation.run()
+ 
+""" Step 6: Make a Plot from the Evaluation.results """
+# The Evaluation.results are a set of nested lists to support many different
+# possible Evaluation scenarios.
+#
+# The Evaluation results docs say:
+# The shape of results is (num_metrics, num_target_datasets) if no subregion
+# Accessing the actual results when we have used 1 metric and 1 dataset is
+# done this way:
+print("Accessing the Results of the Evaluation run")
+results = bias_evaluation.results[0][0,:]
+ 
+# From the bias output I want to make a Contour Map of the region
+print("Generating a contour map using ocw.plotter.draw_contour_map()")
+ 
+lats = new_lats
+lons = new_lons
+fname = OUTPUT_PLOT
+gridshape = (1, 1)  # Using a 1 x 1 since we have a single Bias for the full time range
+plot_title = "TASMAX Bias of KNMI Compared to CRU 3.1 (%s - %s)" % (start_time.strftime("%Y/%d/%m"), end_time.strftime("%Y/%d/%m"))
+sub_titles = ["Full Temporal Range"]
+ 
+plotter.draw_contour_map(results, lats, lons, fname,
+                         gridshape=gridshape, ptitle=plot_title, 
+                         subtitles=sub_titles)
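
As the comments above note, Evaluation accepts lists of target datasets and metrics, and its results hold one entry per metric and per target dataset. A sketch of a two-target run under the same API (another_dataset is a placeholder for any second OCW Dataset prepared with the same subsetting and regridding steps, not a dataset present in this example):

    # Sketch only: evaluating two target datasets against the same reference.
    multi_evaluation = evaluation.Evaluation(knmi_dataset,
                                             [cru31_dataset, another_dataset],
                                             [metrics.Bias()])
    multi_evaluation.run()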

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig10_and_Fig11.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig10_and_Fig11.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig10_and_Fig11.yaml
deleted file mode 100644
index 0650e61..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig10_and_Fig11.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_monthly_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Timeseries_plot_subregion_annual_cycle
-
-plots1:
-    file_name: Fig10
-    subplots_array: !!python/tuple [7,2]
-
-metrics2: Portrait_diagram_subregion_annual_cycle                
-
-plots2:
-    file_name: Fig11
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig12_summer.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig12_summer.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig12_summer.yaml
deleted file mode 100644
index f11c136..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig12_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig12_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig12_winter.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig12_winter.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig12_winter.yaml
deleted file mode 100644
index f1f0b1e..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig12_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig12_winter 
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig14_and_Fig15.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig14_and_Fig15.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig14_and_Fig15.yaml
deleted file mode 100644
index 5e01ce0..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig14_and_Fig15.yaml
+++ /dev/null
@@ -1,82 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_monthly_1984-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: sw_sfc_dn
-  
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig14
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig15
-
-use_subregions: False
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig16_summer.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig16_summer.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig16_summer.yaml
deleted file mode 100644
index db33eff..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig16_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
-        variable: sw_sfc_dn
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig16_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig16_winter.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig16_winter.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig16_winter.yaml
deleted file mode 100644
index e25a4b2..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig16_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
-        variable: sw_sfc_dn
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig16_winter
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig5_and_Fig6.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig5_and_Fig6.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig5_and_Fig6.yaml
deleted file mode 100644
index ef7cc9c..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig5_and_Fig6.yaml
+++ /dev/null
@@ -1,50 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_tas_annual_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp.*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig5
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig6
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig7_summer.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig7_summer.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig7_summer.yaml
deleted file mode 100644
index ddbce3b..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig7_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig7_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig7_winter.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig7_winter.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig7_winter.yaml
deleted file mode 100644
index 38add9b..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig7_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig7_winter 
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/NARCCAP_paper/Fig8_and_Fig9.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/NARCCAP_paper/Fig8_and_Fig9.yaml b/examples/configuration_file_examples/NARCCAP_paper/Fig8_and_Fig9.yaml
deleted file mode 100644
index d25ecb6..0000000
--- a/examples/configuration_file_examples/NARCCAP_paper/Fig8_and_Fig9.yaml
+++ /dev/null
@@ -1,50 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_annual_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig8
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig9
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml b/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
deleted file mode 100644
index 276e744..0000000
--- a/examples/configuration_file_examples/cmip5_SE_Asia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml
+++ /dev/null
@@ -1,45 +0,0 @@
-workdir: ./
-output_netcdf_filename: cmip5_SE_Asia_prec_DJF_1998-2010.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True  
-    start_time: 1981-01-01
-    end_time: 2010-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: False  
-
-space:
-    min_lat: -15.14
-    max_lat: 27.26
-    min_lon: 89.26  
-    max_lon: 146.96
-
-regrid:
-    regrid_on_reference: True  
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: TRMM
-        dataset_id: 3
-        parameter_id: 36
-
-    targets:
-        data_source: local
-        path: ./data/pr_Amon*                                   
-        variable: pr    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: cmip5_SE_ASIA_prec_DJF_mean_taylor_diagram_to_TRMM
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml b/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
deleted file mode 100644
index b1bbb78..0000000
--- a/examples/configuration_file_examples/cordex-AF_tasmax_annual_mean_bias_to_cru.yaml
+++ /dev/null
@@ -1,46 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex-AF_CRU_taxmax_monthly_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 39
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*tasmax.nc                                                    
-        variable: tasmax  
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-AF_tasmax_annual_mean_bias_to_cru
-    subplots_array: !!python/tuple [3,4] 
-
-use_subregions: False
-


[6/7] climate git commit: Folders with old names have been removed

Posted by hu...@apache.org.
Folders with old names have been removed


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/868d154d
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/868d154d
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/868d154d

Branch: refs/heads/master
Commit: 868d154de1768aa02f85297a11e31ff816f6b07e
Parents: 43cdfd6
Author: huikyole <hu...@jpl.nasa.gov>
Authored: Thu Jan 21 13:08:55 2016 -0800
Committer: huikyole <hu...@jpl.nasa.gov>
Committed: Thu Jan 21 13:08:55 2016 -0800

----------------------------------------------------------------------
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml | 65 ----------------
 .../cordex-arctic_rlds_bias_to_SRB.yaml         | 65 ----------------
 .../cordex-arctic_rlus_bias_to_SRB.yaml         | 65 ----------------
 .../cordex-arctic_rsds_bias_to_SRB.yaml         | 65 ----------------
 .../NARCCAP_paper/Fig10_and_Fig11.yaml          | 81 -------------------
 .../NARCCAP_paper/Fig12_summer.yaml             | 75 ------------------
 .../NARCCAP_paper/Fig12_winter.yaml             | 75 ------------------
 .../NARCCAP_paper/Fig14_and_Fig15.yaml          | 82 --------------------
 .../NARCCAP_paper/Fig16_summer.yaml             | 75 ------------------
 .../NARCCAP_paper/Fig16_winter.yaml             | 75 ------------------
 .../NARCCAP_paper/Fig5_and_Fig6.yaml            | 50 ------------
 .../NARCCAP_paper/Fig7_summer.yaml              | 75 ------------------
 .../NARCCAP_paper/Fig7_winter.yaml              | 75 ------------------
 .../NARCCAP_paper/Fig8_and_Fig9.yaml            | 50 ------------
 14 files changed, 973 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
deleted file mode 100644
index eb4b4c5..0000000
--- a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_clt_MAR-SEP.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 3
-    month_end: 9
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: cld_frac
-        multiplying_factor: 100.0
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/clt*.nc                                                    
-        variable: clt     
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_clt_MAR-SEP_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
deleted file mode 100644
index 1311843..0000000
--- a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlds*.nc                                                    
-        variable: rlds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [1,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
deleted file mode 100644
index b03738a..0000000
--- a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlus_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_up
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlus*.nc                                                    
-        variable: rlus    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlus_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
deleted file mode 100644
index 9613e46..0000000
--- a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rsds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: sw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rsds*.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rsds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
deleted file mode 100644
index 0650e61..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
+++ /dev/null
@@ -1,81 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_monthly_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Timeseries_plot_subregion_annual_cycle
-
-plots1:
-    file_name: Fig10
-    subplots_array: !!python/tuple [7,2]
-
-metrics2: Portrait_diagram_subregion_annual_cycle                
-
-plots2:
-    file_name: Fig11
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]
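Each subregion entry above is an array of [south, north, west, east] boundaries. A minimal sketch of turning such entries into OCW Bounds objects, mirroring the Bounds(min_lat, max_lat, min_lon, max_lon, start, end) call made later in cli_app.py; the choice of regions and the evaluation period here are illustrative:

from datetime import datetime
from ocw.dataset import Bounds

subregions = {
    'R01': [42.75, 49.75, -123.75, -120.25],
    'R02': [42.75, 49.75, -119.75, -112.75],
}
start, end = datetime(1980, 1, 1), datetime(2003, 12, 31)

# Map each named subregion to a Bounds object covering the evaluation period.
subregion_bounds = {
    name: Bounds(south, north, west, east, start, end)
    for name, (south, north, west, east) in subregions.items()
}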

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
deleted file mode 100644
index f11c136..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig12_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]
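For a June-August subset like the one configured above, average_each_year decides whether one seasonal mean is kept per year or all months are pooled into a single climatology. An illustrative NumPy-only sketch (not the RCMES implementation; the array shapes are invented):

import numpy as np

n_years = 24                                   # 1980-2003, as in the config above
jja = np.random.rand(n_years, 3, 10, 10)       # fake (year, month-in-season, lat, lon) values

# average_each_year: True  -> one JJA mean per year (interannual variability metrics)
jja_mean_per_year = jja.mean(axis=1)           # shape (24, 10, 10)

# average_each_year: False -> every JJA month pooled into a single multi-year mean
jja_multiyear_mean = jja.reshape(-1, 10, 10).mean(axis=0)   # shape (10, 10)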

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
deleted file mode 100644
index f1f0b1e..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig12_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_prec_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig12_winter 
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
deleted file mode 100644
index 5e01ce0..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig14_and_Fig15.yaml
+++ /dev/null
@@ -1,82 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_monthly_1984-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: sw_sfc_dn
-  
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig14
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig15
-
-use_subregions: False
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]
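The regrid block above (0.50 degree spacing over the CONUS domain) amounts to building a regular target grid. A short sketch mirroring the np.arange() calls used for spatial regridding in cli_app.py further down:

import numpy as np

min_lat, max_lat = 23.75, 49.75
min_lon, max_lon = -125.75, -66.75
dlat, dlon = 0.50, 0.50

new_lats = np.arange(min_lat, max_lat, dlat)   # 52 latitude points
new_lons = np.arange(min_lon, max_lon, dlon)   # 118 longitude points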

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
deleted file mode 100644
index db33eff..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig16_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
-        variable: sw_sfc_dn
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig16_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
deleted file mode 100644
index e25a4b2..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig16_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_rsds_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1984-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
-        variable: sw_sfc_dn
-
-    targets:
-        data_source: local
-        path: ../data/rsds*ncep.monavg.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig16_winter
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
deleted file mode 100644
index ef7cc9c..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml
+++ /dev/null
@@ -1,50 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_tas_annual_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp.*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig5
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig6
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
deleted file mode 100644
index ddbce3b..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig7_summer.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_JJA_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig7_summer
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]
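The reference block above points at RCMED dataset 10, parameter 38 (CRU temperature). A minimal sketch of fetching it directly with the same rcmed.parameter_dataset() call that cli_app.py uses during evaluation; the domain and period are copied from this configuration:

from datetime import datetime
import ocw.data_source.rcmed as rcmed

cru_tas = rcmed.parameter_dataset(
    10, 38,                          # dataset_id, parameter_id from the config
    23.75, 49.75,                    # min_lat, max_lat
    -125.75, -66.75,                 # min_lon, max_lon
    datetime(1980, 1, 1), datetime(2003, 12, 31))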

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
deleted file mode 100644
index 38add9b..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig7_winter.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_DJF_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ../data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: Fig7_winter 
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/868d154d/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
deleted file mode 100644
index d25ecb6..0000000
--- a/RCMES/configuration_files/NARCCAP_paper/Fig8_and_Fig9.yaml
+++ /dev/null
@@ -1,50 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_annual_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ../data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 2
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: Fig8
-    subplots_array: !!python/tuple [4,2]
-
-metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots2:
-    file_name: Fig9
-
-use_subregions: False


[4/7] climate git commit: CLIMATE-720 - Revise file structure

Posted by hu...@apache.org.
CLIMATE-720 - Revise file structure

- A new folder, 'RCMES', is created.
- Configuration files are moved into RCMES/configuration_files/
- cli_app.py is now in RCMES
- test.py is the same as examples/knmi_to_cru31_full_bias.py
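A minimal usage sketch for the new layout, assuming run_RCMES.py accepts a configuration file path on the command line (the invocation details are an assumption, not stated in this commit message):

import subprocess

# Hypothetical invocation; assumes run_RCMES.py takes a config path as its argument.
subprocess.check_call([
    'python', 'run_RCMES.py',
    'configuration_files/NARCCAP_paper/Fig5_and_Fig6.yaml',
], cwd='RCMES')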


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/c6c9dd1c
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/c6c9dd1c
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/c6c9dd1c

Branch: refs/heads/master
Commit: c6c9dd1c59469b3a22104d880f07e0691a9289b4
Parents: 8bc19c6
Author: huikyole <hu...@jpl.nasa.gov>
Authored: Wed Jan 20 11:02:12 2016 -0800
Committer: huikyole <hu...@jpl.nasa.gov>
Committed: Wed Jan 20 11:02:12 2016 -0800

----------------------------------------------------------------------
 RCMES/cli_app.py                                | 1438 ++++++++++++++++++
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml |   65 +
 .../cordex-arctic_rlds_bias_to_SRB.yaml         |   65 +
 .../cordex-arctic_rlus_bias_to_SRB.yaml         |   65 +
 .../cordex-arctic_rsds_bias_to_SRB.yaml         |   65 +
 .../NARCCAP_paper/Fig10_and_Fig11.yaml          |   81 +
 .../NARCCAP_paper/Fig12_summer.yaml             |   75 +
 .../NARCCAP_paper/Fig12_winter.yaml             |   75 +
 .../NARCCAP_paper/Fig14_and_Fig15.yaml          |   82 +
 .../NARCCAP_paper/Fig16_summer.yaml             |   75 +
 .../NARCCAP_paper/Fig16_winter.yaml             |   75 +
 .../NARCCAP_paper/Fig5_and_Fig6.yaml            |   50 +
 .../NARCCAP_paper/Fig7_summer.yaml              |   75 +
 .../NARCCAP_paper/Fig7_winter.yaml              |   75 +
 .../NARCCAP_paper/Fig8_and_Fig9.yaml            |   50 +
 RCMES/metrics_and_plots.py                      |  243 +++
 RCMES/run_RCMES.py                              |  246 +++
 RCMES/statistical_downscaling/MPI_tas_JJA.yaml  |   29 +
 .../run_statistical_downscaling.py              |  231 +++
 RCMES/test/test.py                              |  179 +++
 .../NARCCAP_paper/Fig10_and_Fig11.yaml          |   81 -
 .../NARCCAP_paper/Fig12_summer.yaml             |   75 -
 .../NARCCAP_paper/Fig12_winter.yaml             |   75 -
 .../NARCCAP_paper/Fig14_and_Fig15.yaml          |   82 -
 .../NARCCAP_paper/Fig16_summer.yaml             |   75 -
 .../NARCCAP_paper/Fig16_winter.yaml             |   75 -
 .../NARCCAP_paper/Fig5_and_Fig6.yaml            |   50 -
 .../NARCCAP_paper/Fig7_summer.yaml              |   75 -
 .../NARCCAP_paper/Fig7_winter.yaml              |   75 -
 .../NARCCAP_paper/Fig8_and_Fig9.yaml            |   50 -
 ...ia_prec_DJF_mean_taylor_diagram_to_TRMM.yaml |   45 -
 ...ordex-AF_tasmax_annual_mean_bias_to_cru.yaml |   46 -
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml |   65 -
 .../cordex-arctic_rlds_bias_to_SRB.yaml         |   65 -
 .../cordex-arctic_rlus_bias_to_SRB.yaml         |   65 -
 .../cordex-arctic_rsds_bias_to_SRB.yaml         |   65 -
 ...prec_subregion_annual_cycle_time_series.yaml |   90 --
 .../metrics_and_plots.py                        |  243 ---
 ...cap_prec_JJA_mean_taylor_diagram_to_cru.yaml |   44 -
 ...nterannual_variability_portrait_diagram.yaml |   75 -
 .../configuration_file_examples/run_RCMES.py    |  246 ---
 .../statistical_downscaling/MPI_tas_JJA.yaml    |   29 -
 .../run_statistical_downscaling.py              |  231 ---
 ocw-cli/cli_app.py                              | 1438 ------------------
 44 files changed, 3339 insertions(+), 3460 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/cli_app.py
----------------------------------------------------------------------
diff --git a/RCMES/cli_app.py b/RCMES/cli_app.py
new file mode 100644
index 0000000..60f5219
--- /dev/null
+++ b/RCMES/cli_app.py
@@ -0,0 +1,1438 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import curses
+import sys
+import os
+import numpy as np
+import getpass
+import urllib2
+import json
+
+from netCDF4 import Dataset
+from datetime import datetime, timedelta
+
+import ocw.metrics as metrics
+import ocw.plotter as plotter
+import ocw.dataset_processor as dsp
+import ocw.evaluation as evaluation
+import ocw.data_source.rcmed as rcmed
+from ocw.dataset import Bounds
+from ocw.data_source.local import load_file
+import ocw.utils as utils
+import ocw.data_source.esgf as esgf
+from ocw_config_runner.configuration_writer import export_evaluation_to_config
+
+import ssl
+if hasattr(ssl, '_create_unverified_context'):
+    ssl._create_default_https_context = ssl._create_unverified_context
+
+def ready_screen(page, note=""):
+    ''' Generates page borders, header, footer and notification center.
+
+    :param page: Name of current page
+    :type page: string
+    :param note: Notification that system returns and will be shown
+         at the bottom of page
+    :type note: string
+
+    :returns: y and x as location of text on screen
+    :rtype: integer
+    '''
+
+    screen.clear()
+    y, x = screen.getmaxyx()
+    screen.border(0)
+    screen.addstr(0, x/2-len(TITLE)/2, TITLE)
+    screen.addstr(y-1, x/2-len(ORGANIZATION)/2, ORGANIZATION)
+    screen.addstr(y-3, 1, "Notification:")
+    for each in range(1, x-1):
+         screen.addstr(y-4, each, "-")
+    if page == "main_menu":
+         screen.addstr(y-3, x-21, "(NC) = Not complete")
+         screen.addstr(y-2, x-21, "(C)  = Complete")
+    if page == "settings_screen":
+         for i in range(y-5):
+              screen.addstr(i+1, x/2-2, ".")
+    screen.addstr(y-2, 1, note)
+
+    return y, x
+
+
+def get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable):
+    dataset_info = esgf._get_file_download_data(esgf_dataset_id, esgf_variable)
+    netCDF_name = dataset_info[0][0].split("/")[-1]
+
+    return netCDF_name
+
+
+##############################################################
+#         Manage Model Screen
+##############################################################
+
+def load_local_model_screen(header):
+    '''Generates the screen for loading a local model file.
+    The path to the model file (netCDF) and the variable name are required.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("load_local_model_screen")
+    screen.addstr(1, 1, header + " > Load Local Model File ")
+    screen.addstr(4, 2, "Enter model path: ")
+    model_path = screen.getstr()
+    try:
+         netCDF_file = Dataset(model_path, 'r')
+         all_netcdf_variables = [variable.encode() for variable in netCDF_file.variables.keys()]
+         try:
+              screen.addstr(6, 2, "Enter model variable name {0}: ".format(all_netcdf_variables))
+              variable_name = screen.getstr()
+              screen.addstr(7, 4, "{0}".format(netCDF_file.variables[variable_name]))
+              screen.addstr(20, 2, "Confirm:")
+              screen.addstr(21, 4, "0- No")
+              screen.addstr(22, 4, "1- Yes")
+              screen.addstr(23, 3, "Would you take this variable:")
+              answer = screen.getstr()
+              if answer == "0":
+                   note = "WARNING: Model file cannot be added."
+              elif answer == "1":
+                   model_dataset = load_file(model_path, variable_name)
+                   model_datasets.append(model_dataset)
+                   models_info.append({'directory': model_path, 'variable_name': variable_name})
+                   note = "Model file successfully added."
+              else:
+                   note = "WARNING: Model file cannot be added."
+         except:
+              note = "WARNING: Model file cannot be added. The variable [{0}] is not accepted. Please try again.".format(variable_name)
+         netCDF_file.close()
+    except:
+         note = "WARNING: Model file cannot be read. Please check the file directory or format. Only netCDF format is accepted."
+
+    return note
+
+
+def load_esgf_model_screen(header):
+    '''Generates the screen for loading an ESGF model file.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("load_esgf_model_screen")
+    screen.addstr(1, 1, header + " > Download ESGF Dataset ")
+    screen.addstr(6, 1, "Enter Dataset ID:")
+    esgf_dataset_id = screen.getstr()
+    screen.addstr(7, 1, "Enter Variable:")
+    esgf_variable = screen.getstr()
+    screen.addstr(8, 1, "Enter Username:")
+    esgf_username = screen.getstr()
+    screen.addstr(9, 1, "Enter Password:")
+    esgf_password = screen.getstr()
+    try:
+        solr_url = "http://esg-datanode.jpl.nasa.gov/esg-search/search?id={0}&variable={1}&format=application%2Fsolr%2Bjson".format(esgf_dataset_id, esgf_variable)
+        metadata_json = json.load(urllib2.urlopen(solr_url))
+        if metadata_json['response']['docs'][0]["product"][0] != "observations":
+            screen.addstr(11, 4, "Title: {0}".format(metadata_json['response']['docs'][0]['title']))
+            screen.addstr(12, 4, "Start Date: {0}".format(metadata_json['response']['docs'][0]['datetime_start']))
+            screen.addstr(13, 4, "End Date: {0}".format(metadata_json['response']['docs'][0]['datetime_stop']))
+            screen.addstr(15, 2, "Confirm:")
+            screen.addstr(16, 4, "0- No")
+            screen.addstr(17, 4, "1- Yes")
+            screen.addstr(18, 3, "Would you take this dataset:")
+            answer = screen.getstr()
+            if answer == "0":
+                note = "WARNING: ESGF model file cannot be added."
+            elif answer == "1":
+                try:
+                    screen.addstr(20, 4, "Downloading dataset.....")
+                    screen.refresh()
+                    datasets = esgf.load_dataset(esgf_dataset_id,
+                                                esgf_variable,
+                                                esgf_username,
+                                                esgf_password)
+                    netCDF_name = get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable)
+                    netCDF_path = "/tmp/{0}".format(netCDF_name)
+                    model_dataset = load_file(netCDF_path, esgf_variable)
+                    model_datasets.append(model_dataset)
+                    models_info.append({'directory': netCDF_path, 'variable_name': esgf_variable})
+                    note = "Dataset successfully downloaded."
+                except:
+                    note = "WARNING: Dataset has not been downloaded. Check your ESGF permission."
+        else:
+            note = "The selected dataset is Observation, please enter model dataset."
+    except:
+        note = "WARNING: Something went wrong in downloading model dataset from ESGF."
+
+    return  note
+
+
+def unload_model_screen(header):
+    '''Generates the screen for unloading a model file.
+    It lists all loaded models with an index for each.
+    Selecting a model by its index removes it from the list of models.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("unload_model_screen")
+    screen.addstr(1, 1, header + " > Unload Model File")
+    screen.addstr(6, 1, "List of Model:")
+    for i, model in enumerate(models_info):
+         screen.addstr(8 + i, 10, "Model Number:[{0}] - Model path:[{1}] - Variables:[{2}]".format(str(i), model['directory'], model['variable_name']))
+    screen.addstr(3, 2, "Select the model number to remove (press enter to go back): ")
+    try:
+         model_remove_index = screen.getstr()
+         models_info.pop(int(model_remove_index))
+         model_datasets.pop(int(model_remove_index))
+         note = "Model file unloaded successfully"
+    except:
+         note = "WARNING: Model file not unloaded successfully."
+
+    return note
+
+
+def list_model_screen(header):
+    '''Generates screen to list all model files.
+
+    :param header: Header of page
+    :type header: string
+    '''
+
+    ready_screen("list_model_screen")
+    screen.addstr(1, 1, header + " > List Model File ")
+    screen.addstr(6, 6, "List of model(s): ")
+    for i, model in enumerate(models_info):
+         screen.addstr(8 + i, 10, "Model Number:[{0}] - Model path:[{1}] - Variables:[{2}]".format(str(i), model['directory'], model['variable_name']))
+    screen.addstr(4, 4, "Return to Manage Model (press Enter) :")
+    screen.getstr()
+
+
+def manage_model_screen(header, note=""):
+    '''Generates Manage Model screen.
+
+    :param header: Header of page
+    :type header: string
+    :param note: Notification, defaults to an empty string.
+    :type note: string
+    '''
+
+    option = ''
+    while option != '0':
+         ready_screen("manage_model_screen", note)
+         screen.addstr(1, 1, header)
+         screen.addstr(4, 4, "1 - Load Local Model File")
+         screen.addstr(6, 4, "2 - Load ESGF Model File")
+         screen.addstr(8, 4, "3 - Unload Model File")
+         screen.addstr(10, 4, "4 - List Model File")
+         screen.addstr(12, 4, "0 - Return to Main Menu")
+         screen.addstr(14, 2, "Select an option: ")
+         screen.refresh()
+         option = screen.getstr()
+
+         if option == '1':
+              note = load_local_model_screen(header)
+         if option == '2':
+              note = load_esgf_model_screen(header)
+         if option == '3':
+              note = unload_model_screen(header)
+         if option == '4':
+              note = list_model_screen(header)
+              note = " "
+
+
+##############################################################
+#     Manage Observation Screen
+##############################################################
+
+def select_obs_screen(header):   #TODO: if the observation is already selected, don't select again.
+    '''Generates screen to select observation.
+    It retrieves the list of observations from the database and builds a table from it.
+    The user selects an observation by dataset_id and parameter_id.
+    If the terminal screen is too small to show the whole table, a notification with a link to the parameter table on the website is shown instead.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("select_obs_screen")
+    screen.addstr(1, 1, header + " > Select Observation ")
+    screen.addstr(7, 1, "Observations Table: ")
+    screen.addstr(8, 2, "|D-ID| - |P-ID| - |Database")
+    screen.addstr(9, 2, "|----| - |----| - |--------")
+    all_obs_info = rcmed.get_parameters_metadata()
+    new_all_obs_info = []
+    for each in all_obs_info:
+        if not each['parameter_id'] in ['72', '73', '74', '75', '80', '42', '81', '84', '85', '86', '89', '90', '91', '94', '95', '96', '97', '98', '99', '100', '101', '103', '106']:
+            new_all_obs_info.append(each)
+    all_obs_info = new_all_obs_info
+    del new_all_obs_info
+    try:
+         for position, obs_info in enumerate(all_obs_info):
+            dataset_id = obs_info['dataset_id']
+            parameter_id = obs_info['parameter_id']
+            database = obs_info['database']
+            line = "|{0:>4}| - |{1:>4}| - |{2}".format(dataset_id, parameter_id, database)
+            if position <= 25:
+                 screen.addstr(10 + position, 2, line)
+            elif position > 25 and position <= 50:
+                 screen.addstr(8, 50, "|D-ID| - |P-ID| - |Database")
+                 screen.addstr(9, 50, "|----| - |----| - |--------")
+                 screen.addstr(10 + position - 26, 50, line)
+            else:
+                 screen.addstr(8, 100, "|D-ID| - |P-ID| - |Database")
+                 screen.addstr(9, 100, "|----| - |----| - |--------")
+                 screen.addstr(10 + position - 51, 100, line)
+    except:
+         ready_screen("select_obs_screen")
+         screen.addstr(1, 1, header + " > Select Observation ")
+         screen.addstr(10, 1, "Observation table cannot be shown due to small screen size. ")
+         screen.addstr(11, 1, "Please enlarge your screen and try again or refer to 'https://rcmes.jpl.nasa.gov/content/data-rcmes-database'. ")
+    try:
+         screen.addstr(2, 1, "More info for observation: https://rcmes.jpl.nasa.gov/content/data-rcmes-database")
+         screen.addstr(4, 2, "Enter Dataset ID (D-ID): ")
+         dataset_id = screen.getstr()
+         screen.addstr(5, 2, "Enter Parameter ID (P-ID): ")
+         parameter_id = screen.getstr()
+
+         for obs in all_obs_info:
+              if obs['dataset_id'] == dataset_id and obs['parameter_id'] == parameter_id:
+                   observations_info.append({
+                        'database':obs['database'],
+                        'dataset_id':dataset_id,
+                        'parameter_id':parameter_id,
+                        'start_date':obs['start_date'],
+                        'end_date':obs['end_date'],
+                        'bounding_box':obs['bounding_box'],
+                        'timestep':obs['timestep'],
+                        'min_lat':float(eval(obs['bounding_box'].encode())[2][0]) if obs['bounding_box'] else None,
+                        'max_lat':float(eval(obs['bounding_box'].encode())[0][0]) if obs['bounding_box'] else None,
+                        'min_lon':float(eval(obs['bounding_box'].encode())[2][1]) if obs['bounding_box'] else None,
+                        'max_lon':float(eval(obs['bounding_box'].encode())[0][1]) if obs['bounding_box'] else None,
+                        'lat_res':float(obs['lat_res'].encode()),
+                        'lon_res':float(obs['lon_res'].encode()),
+                        'unit':obs['units']
+                        })
+                   note = "Observation sucessfully selected."
+                   break
+              else:
+                   note = "WARNING: Observation cannot be selected. There is no observation with given info."
+    except:
+         note = "WARNING: Observation cannot be selected, dataset or parameter id is wrong."
+
+    return  note
+
+
+def load_esgf_obs_screen(header):
+    '''Generates the screen for loading an ESGF observation file.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("load_esgf_obs_screen")
+    screen.addstr(1, 1, header + " > Download ESGF Dataset ")
+    screen.addstr(6, 1, "Enter Dataset ID:")
+    esgf_dataset_id = screen.getstr()
+    screen.addstr(7, 1, "Enter Variable:")
+    esgf_variable = screen.getstr()
+    screen.addstr(8, 1, "Enter Username:")
+    esgf_username = screen.getstr()
+    screen.addstr(9, 1, "Enter Password:")
+    esgf_password = screen.getstr()
+    try:
+        solr_url = "http://esg-datanode.jpl.nasa.gov/esg-search/search?id={0}&variable={1}&format=application%2Fsolr%2Bjson".format(esgf_dataset_id, esgf_variable)
+        metadata_json = json.load(urllib2.urlopen(solr_url))
+        all_variables = metadata_json['response']['docs'][0]['variable']
+        variable_index = all_variables.index(esgf_variable)
+        if metadata_json['response']['docs'][0]["product"][0] == "observations":
+            screen.addstr(11, 4, "Variable Long Name: {0}".format(metadata_json['response']['docs'][0]['variable_long_name'][variable_index]))
+            screen.addstr(12, 4, "Start Date: {0}".format(metadata_json['response']['docs'][0]['datetime_start']))
+            screen.addstr(13, 4, "End Stop: {0}".format(metadata_json['response']['docs'][0]['datetime_stop']))
+            screen.addstr(14, 4, "Time Frequency: {0}".format(metadata_json['response']['docs'][0]['time_frequency']))
+            screen.addstr(15, 4, "Variable Units: {0}".format(metadata_json['response']['docs'][0]['variable_units'][variable_index]))
+            screen.addstr(16, 4, "East Degrees: {0}".format(metadata_json['response']['docs'][0]['east_degrees']))
+            screen.addstr(17, 4, "North Degrees: {0}".format(metadata_json['response']['docs'][0]['north_degrees']))
+            screen.addstr(18, 4, "South Degrees: {0}".format(metadata_json['response']['docs'][0]['south_degrees']))
+            screen.addstr(19, 4, "West Degrees: {0}".format(metadata_json['response']['docs'][0]['west_degrees']))
+            screen.addstr(22, 2, "Confirm:")
+            screen.addstr(23, 4, "0- No")
+            screen.addstr(24, 4, "1- Yes")
+            screen.addstr(25, 3, "Would you take this dataset:")
+            answer = screen.getstr()
+            if answer == "0":
+                note = "WARNING: ESGF observation file cannot be added."
+            elif answer == "1":
+                try:
+                    screen.addstr(27, 4, "Downloading dataset.....")
+                    screen.refresh()
+                    datasets = esgf.load_dataset(esgf_dataset_id,
+                                                esgf_variable,
+                                                esgf_username,
+                                                esgf_password)
+                    netCDF_name = get_esgf_netCDF_file_name(esgf_dataset_id, esgf_variable)
+                    netCDF_path = "/tmp/{0}".format(netCDF_name)
+                    obs_dataset = load_file(netCDF_path, esgf_variable)
+                    observations_info.append({
+                     'database':"{0}".format(netCDF_path),
+                     'dataset_id':"esgf".format(esgf_variable),
+                     'parameter_id':"{0}".format(esgf_variable),
+                     'start_date': obs_dataset.time_range()[0].strftime("%Y-%m-%d"),
+                     'end_date':obs_dataset.time_range()[1].strftime("%Y-%m-%d"),
+                     #'bounding_box':obs['bounding_box'],
+                     'timestep':"monthly",
+                     'min_lat':obs_dataset.spatial_boundaries()[0],
+                     'max_lat':obs_dataset.spatial_boundaries()[1],
+                     'min_lon':obs_dataset.spatial_boundaries()[2],
+                     'max_lon':obs_dataset.spatial_boundaries()[3],
+                     'lat_res':obs_dataset.spatial_resolution()[0],
+                     'lon_res':obs_dataset.spatial_resolution()[1],
+                     'unit':"{0}".format(metadata_json['response']['docs'][0]['variable_units'][1])
+                     })
+                    note = "Dataset successfully downloaded."
+                except:
+                    note = "WARNING: Dataset has not been downloaded."
+        else:
+            note = "The selected dataset is not Observation, please enter observation dataset."
+    except:
+        note = "WARNING: Something went wrong in downloading observation dataset from ESGF."
+
+    return  note
+
+
+def unselect_obs_screen(header):
+    '''Generates the screen for unselecting observations.
+    Observations can be unselected by entering the index allocated to them.
+
+    :param header: Header of page
+    :type header: string
+
+    :returns: Notification
+    :rtype: string
+    '''
+
+    ready_screen("unselect_obs_screen")
+    screen.addstr(1, 1, header + " > Unselect Observation ")
+    screen.addstr(6, 1, "List Observation(s):")
+    for i, obs_info in enumerate(observations_info):
+         screen.addstr(8 + i, 10, " [" + str(i) + "] : " + " Dataset ID: " + obs_info['dataset_id'] + " - Parameter ID: "+ obs_info['parameter_id'] + " - Database: "+ obs_info['database'])
+    screen.addstr(3, 2, "Select the observation to remove (press enter to go back): ")
+    try:
+         obs_remove_index = screen.getstr()
+         observations_info.pop(int(obs_remove_index))
+         note = "Observation sucessfully unselected."
+    except:
+         note = "WARNING: Unselecting model was not successful."
+
+    return note
+
+
+def list_obs_screen(header):
+    '''Generates screen to list observations.
+
+    :param header: Header of page
+    :type header: string
+    '''
+
+    ready_screen("list_obs_screen")
+    screen.addstr(1, 1, header + " > List Observation ")
+    screen.addstr(6, 6, "List of observation(s): ")
+    for i, obs_info in enumerate(observations_info):
+         screen.addstr(8 + i, 10, " [" + str(i) + "] : " + " Dataset ID: " + obs_info['dataset_id'] + " - Parameter ID: "+ obs_info['parameter_id'] + " - Database: "+ obs_info['database'])
+    screen.addstr(4, 4, "Return to Manage Observation (press Enter) :")
+    screen.getstr()
+
+
+def manage_obs_screen(header, note=""):
+    '''Generates Manage Observation screen.
+
+    :param header: Header of page
+    :type header: string
+    :param note: Notification, defaults to an empty string.
+    :type note: string
+    '''
+
+    option = ''
+    while option != '0':
+         ready_screen("manage_obs_screen", note)
+         screen.addstr(1, 1, header)
+         screen.addstr(4, 4, "1 - Select Observation")
+         screen.addstr(6, 4, "2 - Load ESGF Observation")
+         screen.addstr(8, 4, "3 - Unselect Observation")
+         screen.addstr(10, 4, "4 - List Observation")
+         screen.addstr(12, 4, "0 - Return to Main Menu")
+         screen.addstr(14, 2, "Select an option: ")
+         screen.refresh()
+
+         option = screen.getstr()
+         if option == '1':
+              note = select_obs_screen(header)
+         if option == '2':
+              note = load_esgf_obs_screen(header)
+         if option == '3':
+              note = unselect_obs_screen(header)
+         if option == '4':
+              list_obs_screen(header)
+              note = " "
+
+
+##############################################################
+#     Run Evaluation Screen
+##############################################################
+
+def run_screen(model_datasets, models_info, observations_info,
+               overlap_start_time, overlap_end_time, overlap_min_lat,
+               overlap_max_lat, overlap_min_lon, overlap_max_lon,
+               temp_grid_setting, spatial_grid_setting_lat, spatial_grid_setting_lon, reference_dataset, target_datasets, metric, working_directory, plot_title):
+    '''Generates screen to show running evaluation process.
+
+    :param model_datasets: list of model dataset objects
+    :type model_datasets: list
+    :param models_info: list of dictionaries that contain information for each model
+    :type models_info: list
+    :param observations_info: list of dictionaries that contain information for each observation
+    :type observations_info: list
+    :param overlap_start_time: overlap start time between model and obs start time
+    :type overlap_start_time: datetime
+    :param overlap_end_time: overlap end time between model and obs end time
+    :type overlap_end_time: datetime
+    :param overlap_min_lat: overlap minimum lat between model and obs minimum lat
+    :type overlap_min_lat: float
+    :param overlap_max_lat: overlap maximum lat between model and obs maximum lat
+    :type overlap_max_lat: float
+    :param overlap_min_lon: overlap minimum lon between model and obs minimum lon
+    :type overlap_min_lon: float
+    :param overlap_max_lon: overlap maximum lon between model and obs maximum lon
+    :type overlap_max_lon: float
+    :param temp_grid_setting: temporal grid option such as hourly, daily, monthly and annually
+    :type temp_grid_setting: string
+    :param spatial_grid_setting_lat: spatial grid spacing in latitude (degrees)
+    :type spatial_grid_setting_lat: float
+    :param spatial_grid_setting_lon: spatial grid spacing in longitude (degrees)
+    :type spatial_grid_setting_lon: float
+    :param reference_dataset: dictionary of reference dataset
+    :type reference_dataset: dictionary
+    :param target_datasets: dictionary of all target datasets
+    :type target_datasets: dictionary
+    :param metric: name of selected metric
+    :type metric: string
+    :param working_directory: path to a directory for storing outputs
+    :type working_directory: string
+    :param plot_title: Title for plot
+    :type plot_title: string
+    '''
+    try:
+        target_datasets_ensemble = []
+        new_model_datasets = model_datasets[:]
+
+        option = None
+        if option != "0":
+             ready_screen("run_evaluation_screen")
+             y = screen.getmaxyx()[0]
+             screen.addstr(2, 2, "Evaluation started....")
+             screen.refresh()
+
+             screen.addstr(4, 4, "Retrieving data...")
+             screen.refresh()
+             obs_dataset = []
+             for i in range(len(observations_info)):
+                  if observations_info[i]['dataset_id'] == "esgf":
+                      obs_dataset.append(load_file(observations_info[i]['database'], observations_info[i]['parameter_id']))
+                  else:
+                      dataset_id = int(observations_info[i]['dataset_id'])
+                      parameter_id = int(observations_info[i]['parameter_id'])
+                      obs_dataset.append(rcmed.parameter_dataset(
+                          dataset_id,
+                          parameter_id,
+                          overlap_min_lat,
+                          overlap_max_lat,
+                          overlap_min_lon,
+                          overlap_max_lon,
+                          overlap_start_time,
+                          overlap_end_time))
+
+             screen.addstr(4, 4, "--> Data retrieved.")
+             screen.refresh()
+
+             EVAL_BOUNDS = Bounds(overlap_min_lat, overlap_max_lat, overlap_min_lon, overlap_max_lon, overlap_start_time, overlap_end_time)
+
+             screen.addstr(5, 4, "Temporally regridding...")
+             screen.refresh()
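+             # Translate the temporal gridding option into the bucket size (in
+             # days) that is handed to dsp.temporal_rebin as a timedelta.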
+             if temp_grid_setting.lower() == 'hourly':
+                  days = 0.5
+             elif temp_grid_setting.lower() == 'daily':
+                  days = 1
+             elif temp_grid_setting.lower() == 'monthly':
+                  days = 31
+             else:
+                  days = 365
+             for i in range(len(obs_dataset)):
+                  obs_dataset[i] = dsp.temporal_rebin(obs_dataset[i], timedelta(days))
+
+             for member, each_target_dataset in enumerate(new_model_datasets):
+                  new_model_datasets[member] = dsp.temporal_rebin(new_model_datasets[member], timedelta(days))
+                  if each_target_dataset.lats.ndim !=2 and each_target_dataset.lons.ndim !=2:
+                      new_model_datasets[member] = dsp.subset(EVAL_BOUNDS, new_model_datasets[member])
+                  else:
+                      new_model_datasets[member] = dsp.temporal_slice(EVAL_BOUNDS.start, EVAL_BOUNDS.end, each_target_dataset)
+             screen.addstr(5, 4, "--> Temporally regridded.")
+             screen.refresh()
+
+             screen.addstr(6, 4, "Spatially regridding...")
+             screen.refresh()
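+             # Build the common target grid from the overlap domain at the chosen
+             # latitude/longitude resolution.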
+             new_lats = np.arange(overlap_min_lat, overlap_max_lat, spatial_grid_setting_lat)
+             new_lons = np.arange(overlap_min_lon, overlap_max_lon, spatial_grid_setting_lon)
+             for i in range(len(obs_dataset)):
+                  obs_dataset[i] = dsp.spatial_regrid(obs_dataset[i], new_lats, new_lons)
+                  obs_dataset[i] = dsp.variable_unit_conversion(obs_dataset[i])
+
+             for member, each_target_dataset in enumerate(new_model_datasets):
+                  new_model_datasets[member] = dsp.spatial_regrid(new_model_datasets[member], new_lats, new_lons)
+                  new_model_datasets[member] = dsp.variable_unit_conversion(new_model_datasets[member])
+             screen.addstr(6, 4, "--> Spatially regridded.")
+             screen.refresh()
+
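+             # Combine all datasets into one list so they end up sharing the same
+             # missing-data mask before any metric is computed.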
+             num_obs = len(obs_dataset)
+             masked_datasets = dsp.mask_missing_data(obs_dataset + new_model_datasets)
+             obs_dataset = masked_datasets[:num_obs]
+             new_model_datasets = masked_datasets[num_obs:]
+
+             if metric == 'bias':
+                  allNames = []
+
+                  for model in new_model_datasets:
+                          allNames.append(model.name)
+
+                  screen.addstr(7, 4, "Setting up metrics...")
+                  screen.refresh()
+                  mean_bias = metrics.TemporalMeanBias()
+                  pattern_correlation = metrics.PatternCorrelation()
+                  spatial_std_dev_ratio = metrics.StdDevRatio()
+                  screen.addstr(7, 4, "--> Metrics setting done.")
+                  screen.refresh()
+
+                  screen.addstr(8, 4, "Running evaluation.....")
+                  screen.refresh()
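+                  # reference_dataset and target_datasets hold keys such as 'obs0'
+                  # or 'mod1'; the trailing character indexes the matching list.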
+                  if reference_dataset[:3] == 'obs':
+                       reference = obs_dataset[int(reference_dataset[-1])]
+                  if reference_dataset[:3] == 'mod':
+                       reference = new_model_datasets[int(reference_dataset[-1])]
+
+                  targets = []
+                  for target in target_datasets:
+                       if target[:3] == 'obs':
+                            targets.append(obs_dataset[int(target[-1])])
+                       if target[:3] == 'mod':
+                            targets.append(new_model_datasets[int(target[-1])])
+
+                  evaluation_result = evaluation.Evaluation(reference, targets, [mean_bias])
+                  #export_evaluation_to_config(evaluation_result)
+                  evaluation_result.run()
+                  screen.addstr(8, 4, "--> Evaluation Finished.")
+                  screen.refresh()
+
+                  screen.addstr(9, 4, "Generating plots....")
+                  screen.refresh()
+                  new_rcm_bias = evaluation_result.results[0]
+
+                  if not os.path.exists(working_directory):
+                       os.makedirs(working_directory)
+
+                  fname = working_directory + 'Bias_contour'
+                  fname2 = working_directory + 'Obs_contour'
+                  fname3 = working_directory + 'Model_contour'
+                  plotter.draw_contour_map(new_rcm_bias, new_lats, new_lons, gridshape=(2, 5), fname=fname, subtitles=allNames, cmap='coolwarm_r')
+                  plotter.draw_contour_map(utils.calc_temporal_mean(reference), new_lats, new_lons, gridshape=(2, 5), fname=fname2, subtitles=allNames, cmap='coolwarm_r')
+                  plotter.draw_contour_map(utils.calc_temporal_mean(targets[0]), new_lats, new_lons, gridshape=(2, 5), fname=fname3, subtitles=allNames, cmap='coolwarm_r')
+                  screen.addstr(9, 4, "--> Plots generated.")
+                  screen.refresh()
+                  screen.addstr(y-2, 1, "Press 'enter' to Exit: ")
+                  option = screen.getstr()
+
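+             # For the 'std' metric, each target's spatial standard deviation
+             # ratio and pattern correlation against the reference are computed
+             # and combined into the array expected by the Taylor diagram.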
+             if metric == 'std':
+                  for i in range(len(obs_dataset)):
+                       _, obs_dataset[i].values = utils.calc_climatology_year(obs_dataset[i])
+                       obs_dataset[i].values = np.expand_dims(obs_dataset[i].values, axis=0)
+
+                  target_datasets_ensemble = dsp.ensemble(new_model_datasets)
+                  target_datasets_ensemble.name = "ENS"
+                  new_model_datasets.append(target_datasets_ensemble)
+
+                  for member, each_target_dataset in enumerate(new_model_datasets):
+                          _, new_model_datasets[member].values = utils.calc_climatology_year(new_model_datasets[member])
+                          new_model_datasets[member].values = np.expand_dims(new_model_datasets[member].values, axis=0)
+
+                  allNames = []
+
+                  for model in new_model_datasets:
+                          allNames.append(model.name)
+                  pattern_correlation = metrics.PatternCorrelation()
+                  spatial_std_dev = metrics.StdDevRatio()
+
+                  if reference_dataset[:3] == 'obs':
+                       reference = obs_dataset[int(reference_dataset[-1])]
+                  if reference_dataset[:3] == 'mod':
+                       reference = new_model_datasets[int(reference_dataset[-1])]
+
+                  targets = []
+                  for target in target_datasets:
+                       if target[:3] == 'obs':
+                            targets.append(obs_dataset[int(target[-1])])
+                       if target[:3] == 'mod':
+                            targets.append(new_model_datasets[int(target[-1])])
+
+                  evaluation_result = evaluation.Evaluation(reference, targets, [spatial_std_dev])
+                  export_evaluation_to_config(evaluation_result)
+                  evaluation_result.run()
+
+                  rcm_std_dev = evaluation_result.results
+                  evaluation_result = evaluation.Evaluation(reference, targets, [pattern_correlation])
+                  evaluation_result.run()
+
+                  rcm_pat_cor = evaluation_result.results
+                  taylor_data = np.array([rcm_std_dev, rcm_pat_cor]).transpose()
+                  new_taylor_data = np.squeeze(np.array(taylor_data))
+
+                  if not os.path.exists(working_directory):
+                       os.makedirs(working_directory)
+
+                  fname = working_directory + 'taylor_plot'
+
+                  plotter.draw_taylor_diagram(new_taylor_data, allNames, "CRU31", fname=fname, fmt='png', frameon=False)
+        del new_model_datasets
+        del obs_dataset
+        return "No error"
+    except Exception, error:
+         return "Error: {0}".format(str(error)[:200])
+
+
+##############################################################
+#     Settings Screen
+##############################################################
+
+def get_models_temp_bound():
+    '''Get models temporal bound.
+
+    :returns: lists of model start and end times
+    :rtype: (list of datetime, list of datetime)
+    '''
+
+    models_start_time = []
+    models_end_time = []
+    for model in model_datasets:
+         models_start_time.append(model.time_range()[0])
+         models_end_time.append(model.time_range()[1])
+
+    return models_start_time, models_end_time
+
+
+def get_obs_temp_bound():
+    '''Get observation temporal bound.
+
+    :returns: lists of observation start and end times
+    :rtype: (list of datetime, list of datetime)
+    '''
+
+    observations_start_time = []
+    observations_end_time = []
+    for obs in observations_info:
+         obs_start_time = datetime.strptime(obs['start_date'], "%Y-%m-%d")
+         observations_start_time.append(obs_start_time)
+         obs_end_time = datetime.strptime(obs['end_date'], "%Y-%m-%d")
+         observations_end_time.append(obs_end_time)
+
+    return observations_start_time, observations_end_time
+
+
+def get_models_temp_overlap(models_start_time, models_end_time):
+    '''Calculate temporal overlap between all the models
+
+    :param models_start_time: models start time
+    :type models_start_time: list of datetimes
+    :param models_end_time: models end time
+    :type models_end_time: list of datetime
+
+    :returns: overlap start and end time between all the models
+    :rtype: (datetime, datetime)
+    '''
+
+    models_overlap_start_time = max(models_start_time)
+    models_overlap_end_time = min(models_end_time)
+
+    #Need to check if all models have temporal overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if models_overlap_end_time <= models_overlap_start_time:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more models do not have temporal overlap with the others.")
+
+    return models_overlap_start_time, models_overlap_end_time
+
+
+def get_obs_temp_overlap(observations_start_time, observations_end_time):
+    '''Calculate temporal overlap between all the observations
+
+    :param observations_start_time: observations start time
+    :type observations_start_time: list of datetimes
+    :param observations_end_time: observations end time
+    :type observations_end_time: list of datetime
+
+    :returns: overlap start and end time between all the observations
+    :rtype: (datetime, datetime)
+    '''
+
+    obs_overlap_start_time = max(observations_start_time)
+    obs_overlap_end_time = min(observations_end_time)
+
+    #Need to check if all observations have temporal overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if obs_overlap_end_time <= obs_overlap_start_time:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more observations do not have temporal overlap with the others.")
+
+    return obs_overlap_start_time, obs_overlap_end_time
+
+
+def get_all_temp_overlap(models_overlap_start_time, models_overlap_end_time, obs_overlap_start_time, obs_overlap_end_time):
+    '''Calculate temporal overlap between given datasets.
+
+    :param models_overlap_start_time: models overlap start time
+    :type models_overlap_start_time: datetime
+    :param models_overlap_end_time: models overlap end time
+    :type models_overlap_end_time: datetime
+    :param obs_overlap_start_time: obs overlap start time
+    :type obs_overlap_start_time: datetime
+    :param obs_overlap_end_time: obs overlap end time
+    :type obs_overlap_end_time: datetime
+
+    :returns: overlap start and end time between models and observations
+    :rtype: (datetime, datetime)
+    '''
+
+    all_overlap_start_time = max([models_overlap_start_time, obs_overlap_start_time])
+    all_overlap_end_time = min([models_overlap_end_time, obs_overlap_end_time])
+
+    #Need to check if all datasets have temporal overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if all_overlap_end_time <= all_overlap_start_time:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more datasets do not have temporal overlap with the others.")
+
+    return all_overlap_start_time, all_overlap_end_time
+
+
+def get_models_spatial_bound():               #TODO: convert longitudes to -180, 180 to match with observation data
+    '''Get all models spatial bound.
+
+    :returns: all models spatial boundaries
+    :rtype: list
+    '''
+
+    models_bound = []
+    for model in model_datasets:
+         models_bound.append(model.spatial_boundaries())
+
+    return models_bound
+
+
+def get_models_spatial_overlap(models_bound):
+    '''Calculate spatial overlap between all models.
+
+    :param models_bound: all models spatial boundaries information
+    :type models_bound: list
+
+    :returns: spatial boundaries overlap between all models
+    :rtype: (float, float, float, float)
+    '''
+
+    models_overlap_min_lat = max(each[0] for each in models_bound)
+    models_overlap_max_lat = min(each[1] for each in models_bound)
+    models_overlap_min_lon = max(each[2] for each in models_bound)
+    models_overlap_max_lon = min(each[3] for each in models_bound)
+
+    #Need to check if all models have spatial overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if models_overlap_max_lat <= models_overlap_min_lat or models_overlap_max_lon <= models_overlap_min_lon:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more models do not have spatial overlap with the others.")
+
+    return models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon
+
+
+def get_obs_spatial_bound():
+    '''Get all observations spatial bound.
+
+    :returns: all observations spatial boundaries
+    :rtype: list
+    '''
+
+    observations_bound = []
+    for obs in observations_info:
+         observations_bound.append([obs['min_lat'], obs['max_lat'], obs['min_lon'], obs['max_lon']])
+
+    return observations_bound
+
+
+def get_obs_spatial_overlap(observations_bound):
+    '''Calculate spatial overlap between all observations.
+
+    :param observations_bound: all observations spatial boundaries information
+    :type observations_bound: list
+
+    :returns: spatial boundaries overlap between all observations
+    :rtype: (float, float, float, float)
+    '''
+
+    obs_overlap_min_lat = max(each[0] for each in observations_bound)
+    obs_overlap_max_lat = min(each[1] for each in observations_bound)
+    obs_overlap_min_lon = max(each[2] for each in observations_bound)
+    obs_overlap_max_lon = min(each[3] for each in observations_bound)
+
+    #Need to check if all observations have spatial overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if obs_overlap_max_lat <= obs_overlap_min_lat or obs_overlap_max_lon <= obs_overlap_min_lon:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more observations do not have spatial overlap with the others.")
+
+    return obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon
+
+
+def get_all_spatial_overlap(models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon, obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon):
+    '''Calculate spatial overlap between all models and observations
+
+    :param models_overlap_min_lat: min latitude between all models
+    :type models_overlap_min_lat: float
+    :param models_overlap_max_lat: max latitude between all models
+    :type models_overlap_max_lat: float
+    :param models_overlap_min_lon: min longitude between all models
+    :type models_overlap_min_lon: float
+    :param models_overlap_max_lon: max longitude between all models
+    :type models_overlap_max_lon: float
+    :param obs_overlap_min_lat: min latitude between all observations
+    :type obs_overlap_min_lat: float
+    :param obs_overlap_max_lat: max latitude between all observations
+    :type obs_overlap_max_lat: float
+    :param obs_overlap_min_lon: min longitude between all observations
+    :type obs_overlap_min_lon: float
+    :param obs_overlap_max_lon: max longitude between all observations
+    :type obs_overlap_max_lon: float
+
+    :returns: spatial boundaries overlap between all models and observations
+    :rtype: (float, float, float, float)
+    '''
+
+    all_overlap_min_lat = max([models_overlap_min_lat, obs_overlap_min_lat])
+    all_overlap_max_lat = min([models_overlap_max_lat, obs_overlap_max_lat])
+    all_overlap_min_lon = max([models_overlap_min_lon, obs_overlap_min_lon])
+    all_overlap_max_lon = min([models_overlap_max_lon, obs_overlap_max_lon])
+
+    #Need to check if all datasets have spatial overlap, otherwise return
+    # to main menu and print a warning as notification.
+    if all_overlap_max_lat <= all_overlap_min_lat or all_overlap_max_lon <= all_overlap_min_lon:
+         main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: One or more datasets do not have spatial overlap with the others.")
+
+    return all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon
+
+
+def get_models_temp_res():
+    '''Get models temporal resolution.
+
+    :returns: coarsest temporal resolution among all models
+    :rtype: string
+    '''
+
+    models_resolution = []
+    for model in model_datasets:
+         models_resolution.append(model.temporal_resolution())
+    dic = {0:"hourly", 1:"daily", 2:"monthly", 3:"yearly"}
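+    # Example: resolutions ['daily', 'monthly'] map to keys [1, 2]; taking the
+    # maximum key selects 'monthly', the coarsest resolution shared by all models.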
+    models_resolution_key = []
+    for res in models_resolution:
+         for key, value in dic.items():
+              if value == res:
+                   models_resolution_key.append(key)
+
+    return dic[max(models_resolution_key)]
+
+
+def get_obs_temp_res():
+    '''Get observations temporal resolution.
+
+    :returns: coarsest temporal resolution among all observations
+    :rtype: string
+    '''
+
+    obs_resolution = []
+    for obs in observations_info:
+         # Assumes each observation info dictionary records its temporal
+         # resolution under the 'timestep' key.
+         obs_resolution.append(obs['timestep'])
+    dic = {0:"hourly", 1:"daily", 2:"monthly", 3:"yearly"}
+    obs_resolution_key = []
+    for res in obs_resolution:
+         for key, value in dic.items():
+              if value == res:
+                   obs_resolution_key.append(key)
+
+    return dic[max(obs_resolution_key)]
+
+
+def get_models_spatial_res():
+    '''Get models spatial resolution
+
+    :returns: coarsest (maximum) latitude and longitude resolution among all models
+    :rtype: (float, float)
+    '''
+
+    models_lat_res = []
+    models_lon_res = []
+    for model in model_datasets:
+         models_lat_res.append(model.spatial_resolution()[0])
+         models_lon_res.append(model.spatial_resolution()[1])
+
+    return max(models_lat_res), max(models_lon_res)
+
+
+def get_obs_spatial_res():
+    '''Get observations spatial resolution
+
+    :returns: coarsest (maximum) latitude and longitude resolution among all observations
+    :rtype: (float, float)
+    '''
+
+    obs_lat_res = []
+    obs_lon_res = []
+    for obs in observations_info:
+         obs_lat_res.append(obs['lat_res'])
+         obs_lon_res.append(obs['lon_res'])
+
+    return max(obs_lat_res), max(obs_lon_res)
+
+
+def settings_screen(header):
+    '''Generates screen for settings before running evaluation.
+
+    :param header: Header of page
+    :type header: string
+    '''
+
+    note = " "
+    models_start_time, models_end_time = get_models_temp_bound()
+    models_overlap_start_time, models_overlap_end_time = get_models_temp_overlap(models_start_time, models_end_time)
+    observations_start_time, observations_end_time = get_obs_temp_bound()
+    obs_overlap_start_time, obs_overlap_end_time = get_obs_temp_overlap(observations_start_time, observations_end_time)
+    all_overlap_start_time, all_overlap_end_time = get_all_temp_overlap(models_overlap_start_time, models_overlap_end_time, obs_overlap_start_time, obs_overlap_end_time)
+    models_bound = get_models_spatial_bound()
+    models_overlap_min_lat, models_overlap_max_lat, models_overlap_min_lon, models_overlap_max_lon = get_models_spatial_overlap(models_bound)
+    observations_bound = get_obs_spatial_bound()
+    obs_overlap_min_lat, obs_overlap_max_lat, obs_overlap_min_lon, obs_overlap_max_lon = get_obs_spatial_overlap(observations_bound)
+    all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon = get_all_spatial_overlap(models_overlap_min_lat,
+                                                                                                                 models_overlap_max_lat,
+                                                                                                                 models_overlap_min_lon,
+                                                                                                                 models_overlap_max_lon,
+                                                                                                                 obs_overlap_min_lat,
+                                                                                                                 obs_overlap_max_lat,
+                                                                                                                 obs_overlap_min_lon,
+                                                                                                                 obs_overlap_max_lon)
+    model_temp_res = get_models_temp_res()
+    obs_temp_res = get_obs_temp_res()
+    model_lat_res, model_lon_res = get_models_spatial_res()
+    obs_lat_res, obs_lon_res = get_obs_spatial_res()
+
+    temp_grid_option = "Observation"
+    temp_grid_setting = obs_temp_res
+    spatial_grid_option = "Observation"
+    spatial_grid_setting_lat = obs_lat_res
+    spatial_grid_setting_lon = obs_lon_res
+    models_dict = {}
+
+    for i, model_info in enumerate(models_info):
+         models_dict['mod{0}'.format(i)] = model_info
+    obs_dict = {}
+    for i, obs_info in enumerate(observations_info):
+         obs_dict['obs{0}'.format(i)] = obs_info
+
+    reference_dataset = 'obs0'
+    target_datasets = []
+    for i in range(len(model_datasets)):
+         target_datasets.append('mod{0}'.format(i))
+    subregion_path = None
+    metrics_dict = {'1':'bias', '2':'std'}
+    metric = 'bias'
+    plots = {'bias':"contour map", 'std':"taylor diagram, bar chart(coming soon)"}
+    working_directory = os.getcwd() + "/plots/"  #Default value of working directory set to "plots" folder in current directory
+    plot_title = '' #TODO: ask user about plot title or figure out automatically
+
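+    # Keep the full overlap extents so later user edits can be validated against
+    # the widest valid temporal and spatial bounds.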
+    fix_min_time = all_overlap_start_time
+    fix_max_time = all_overlap_end_time
+    fix_min_lat = all_overlap_min_lat
+    fix_max_lat = all_overlap_max_lat
+    fix_min_lon = all_overlap_min_lon
+    fix_max_lon = all_overlap_max_lon
+
+    option = ''
+    while option != '0':
+         y, x = ready_screen("settings_screen", note)
+         screen.addstr(1, 1, header)
+         screen.addstr(3, 1, "INFORMATION")
+         screen.addstr(4, 1, "===========")
+         screen.addstr(6, 2, "Number of model files:  {0}".format(len(model_datasets)))
+         screen.addstr(7, 2, "Number of observations: {0}".format(len(observations_info)))
+         screen.addstr(8, 2, "Temporal Boundaries:")
+         screen.addstr(9, 5, "Start time = {0}".format(all_overlap_start_time))
+         screen.addstr(10, 5, "End time = {0}".format(all_overlap_end_time))
+         screen.addstr(11, 2, "Spatial Boundaries:")
+         screen.addstr(12, 5, "min-lat = {0}".format(all_overlap_min_lat))
+         screen.addstr(13, 5, "max-lat = {0}".format(all_overlap_max_lat))
+         screen.addstr(14, 5, "min-lon = {0}".format(all_overlap_min_lon))
+         screen.addstr(15, 5, "max-lon = {0}".format(all_overlap_max_lon))
+         screen.addstr(16, 2, "Temporal Resolution:")
+         screen.addstr(17, 5, "Model = {0}".format(model_temp_res))
+         screen.addstr(18, 5, "Observation = {0}".format(obs_temp_res))
+         screen.addstr(19, 2, "Spatial Resolution:")
+         screen.addstr(20, 5, "Model:")
+         screen.addstr(21, 10, "lat = {0}".format(model_lat_res))
+         screen.addstr(22, 10, "lon = {0}".format(model_lon_res))
+         screen.addstr(23, 5, "Observation:")
+         screen.addstr(24, 10, "lat = {0}".format(obs_lat_res))
+         screen.addstr(25, 10, "lon = {0}".format(obs_lon_res))
+         screen.addstr(26, 2, "Temporal Grid Option:  {0}".format(temp_grid_option))
+         screen.addstr(27, 2, "Spatial Grid Option:   {0}".format(spatial_grid_option))
+         screen.addstr(28, 2, "Reference Dataset: {0}".format(reference_dataset))
+         screen.addstr(29, 2, "Target Dataset/s: {0}".format([mod for mod in target_datasets]))
+         screen.addstr(30, 2, "Working Directory:")
+         screen.addstr(31, 5, "{0}".format(working_directory))
+         screen.addstr(32, 2, "Metric: {0}".format(metric))
+         screen.addstr(33, 2, "Plot: {0}".format(plots[metric]))
+
+         screen.addstr(3, x/2, "MODIFICATION and RUN")
+         screen.addstr(4, x/2, "====================")
+         screen.addstr(6, x/2, "1 - Change Temporal Boundaries")
+         screen.addstr(7, x/2, "2 - Change Spatial Boundaries")
+         screen.addstr(8, x/2, "3 - Change Temporal Gridding")
+         screen.addstr(9, x/2, "4 - Change Spatial Gridding")
+         screen.addstr(10, x/2, "5 - Change Reference dataset")
+         screen.addstr(11, x/2, "6 - Change Target dataset/s")
+         screen.addstr(12, x/2, "7 - Change Metric")
+         screen.addstr(13, x/2, "8 - Change Working Directory")
+         #screen.addstr(14, x/2, "9 - Change Plot Title [Coming Soon....]")
+         #screen.addstr(15, x/2, "10 - Save the processed data [Coming Soon....]")
+         screen.addstr(14, x/2, "9 - Show Temporal Boundaries")
+         screen.addstr(15, x/2, "10 - Show Spatial Boundaries")
+         screen.addstr(16, x/2, "0 - Return to Main Menu")
+         screen.addstr(18, x/2, "r - Run Evaluation")
+         screen.addstr(20, x/2, "Select an option: ")
+
+         screen.refresh()
+         option = screen.getstr()
+
+         if option == '1':
+              screen.addstr(25, x/2, "Enter Start Time [min time: {0}] (Format YYYY-MM-DD):".format(fix_min_time))
+              new_start_time = screen.getstr()
+              try:
+                   new_start_time = datetime.strptime(new_start_time, '%Y-%m-%d')
+                   # Zero-pad the month so the YYYYMM integers compare correctly.
+                   new_start_time_int = int("{0}{1:02d}".format(new_start_time.year, new_start_time.month))
+                   fix_min_time_int = int("{0}{1:02d}".format(fix_min_time.year, fix_min_time.month))
+                   fix_max_time_int = int("{0}{1:02d}".format(fix_max_time.year, fix_max_time.month))
+                   all_overlap_end_time_int = int("{0}{1:02d}".format(all_overlap_end_time.year, all_overlap_end_time.month))
+                   if new_start_time_int < fix_min_time_int \
+                   or new_start_time_int > fix_max_time_int \
+                   or new_start_time_int > all_overlap_end_time_int:
+                        note = "Start time has not changed. "
+                   else:
+                        all_overlap_start_time = new_start_time
+                        note = "Start time has changed successfully. "
+              except:
+                   note = "Start time has not changed. "
+              screen.addstr(26, x/2, "Enter End Time [max time:{0}] (Format YYYY-MM-DD):".format(fix_max_time))
+              new_end_time = screen.getstr()
+              try:
+                   new_end_time = datetime.strptime(new_end_time, '%Y-%m-%d')
+                   new_end_time_int = int("{0}{1:02d}".format(new_end_time.year, new_end_time.month))
+                   fix_min_time_int = int("{0}{1:02d}".format(fix_min_time.year, fix_min_time.month))
+                   fix_max_time_int = int("{0}{1:02d}".format(fix_max_time.year, fix_max_time.month))
+                   all_overlap_start_time_int = int("{0}{1:02d}".format(all_overlap_start_time.year, all_overlap_start_time.month))
+                   if new_end_time_int > fix_max_time_int \
+                   or new_end_time_int < fix_min_time_int \
+                   or new_end_time_int < all_overlap_start_time_int:
+                        note = note + " End time has not changed. "
+                   else:
+                        all_overlap_end_time = new_end_time
+                        note = note + " End time has changed successfully. "
+              except:
+                   note = note + " End time has not changed. "
+
+         if option == '2':
+              screen.addstr(25, x/2, "Enter Minimum Latitude [{0}]:".format(fix_min_lat))
+              new_min_lat = screen.getstr()
+              try:
+                   new_min_lat = float(new_min_lat)
+                   if new_min_lat < fix_min_lat or new_min_lat > fix_max_lat or new_min_lat > all_overlap_max_lat:
+                        note = "Minimum latitude has not changed. "
+                   else:
+                        all_overlap_min_lat = new_min_lat
+                        note = "Minimum latitude has changed successfully. "
+              except:
+                   note = "Minimum latitude has not changed. "
+              screen.addstr(26, x/2, "Enter Maximum Latitude [{0}]:".format(fix_max_lat))
+              new_max_lat = screen.getstr()
+              try:
+                   new_max_lat = float(new_max_lat)
+                   if new_max_lat > fix_max_lat or new_max_lat < fix_min_lat or new_max_lat < all_overlap_min_lat:
+                        note = note + " Maximum latitude has not changed. "
+                   else:
+                        all_overlap_max_lat = new_max_lat
+                        note = note + "Maximum latitude has changed successfully. "
+              except:
+                   note = note + " Maximum latitude has not changed. "
+              screen.addstr(27, x/2, "Enter Minimum Longitude [{0}]:".format(fix_min_lon))
+              new_min_lon = screen.getstr()
+              try:
+                   new_min_lon = float(new_min_lon)
+                   if new_min_lon < fix_min_lon or new_min_lon > fix_max_lon or new_min_lon > all_overlap_max_lon:
+                        note = note + " Minimum longitude has not changed. "
+                   else:
+                        all_overlap_min_lon = new_min_lon
+                        note = note + "Minimum longitude has changed successfully. "
+              except:
+                   note = note + " Minimum longitude has not changed. "
+              screen.addstr(28, x/2, "Enter Maximum Longitude [{0}]:".format(fix_max_lon))
+              new_max_lon = screen.getstr()
+              try:
+                   new_max_lon = float(new_max_lon)
+                   if new_max_lon > fix_max_lon or new_max_lon < fix_min_lon or new_max_lon < all_overlap_min_lon:
+                        note = note + " Maximum longitude has not changed. "
+                   else:
+                        all_overlap_max_lon = new_max_lon
+                        note = note + "Maximum longitude has changed successfully. "
+              except:
+                   note = note + " Maximum longitude has not changed. "
+
+         if option == '3':
+              screen.addstr(25, x/2, "Enter Temporal Gridding Option [Model or Observation]:")
+              new_temp_grid_option = screen.getstr()
+              if new_temp_grid_option.lower() == 'model':
+                   temp_grid_option = 'Model'
+                   temp_grid_setting = model_temp_res
+                   note = "Temporal gridding option has changed successfully to {0}".format(temp_grid_option)
+              elif new_temp_grid_option.lower() == 'observation':
+                   temp_grid_option = 'Observation'
+                   temp_grid_setting = obs_temp_res
+                   note = "Temporal gridding option has changed successfully to {0}".format(temp_grid_option)
+              else:
+                   note = "Temporal gridding option has not changed."
+
+         if option == '4':
+              screen.addstr(25, x/2, "Enter Spatial Gridding Option [Model, Observation or User]:")
+              new_spatial_grid_option = screen.getstr()
+              if new_spatial_grid_option.lower() == 'model':
+                   spatial_grid_option = 'Model'
+                   spatial_grid_setting_lat = model_lat_res
+                   spatial_grid_setting_lon = model_lon_res
+                   note = "Spatial gridding option has changed successfully to {0}".format(spatial_grid_option)
+              elif new_spatial_grid_option.lower() == 'observation':
+                   spatial_grid_option = 'Observation'
+                   spatial_grid_setting_lat = obs_lat_res
+                   spatial_grid_setting_lon = obs_lon_res
+                   note = "Spatial gridding option has changed successfully to {0}".format(spatial_grid_option)
+              elif new_spatial_grid_option.lower() == 'user':
+                   screen.addstr(26, x/2, "Please enter latitude spatial resolution: ")
+                   user_lat_res = screen.getstr()
+                   screen.addstr(27, x/2, "Please enter longitude spatial resolution: ")
+                   user_lon_res = screen.getstr()
+                   try:
+                        user_lat_res = float(user_lat_res)
+                        user_lon_res = float(user_lon_res)
+                        spatial_grid_option = 'User: resolution lat:{0}, lon:{1}'.format(str(user_lat_res), str(user_lon_res))
+                        spatial_grid_setting_lat = user_lat_res
+                        spatial_grid_setting_lon = user_lon_res
+                        note = "Spatial gridding option has changed successfully to user defined."
+                   except:
+                        note = "Spatial gridding option has not changed."
+              else:
+                   note = "Spatial gridding option has not changed."
+
+         if option == '5':
+              screen.addstr(25, x/2, "Model/s:")
+              for each in enumerate(models_dict):
+                   screen.addstr(26 + each[0], x/2 + 2, "{0}: {1}".format(each[1], models_dict[each[1]]['directory'].split("/")[-1]))
+              screen.addstr(26 + len(models_dict), x/2, "Observation/s:")
+              for each in enumerate(obs_dict):
+                   screen.addstr(27 + len(models_dict) + each[0], x/2 + 2, "{0}: {1} - ({2})".format(each[1], obs_dict[each[1]]['database'], obs_dict[each[1]]['unit']))
+              screen.addstr(27 + len(obs_dict) + len(models_dict), x/2, "Please select reference dataset:")
+              selected_reference = screen.getstr()
+              if selected_reference in models_dict:
+                   reference_dataset = selected_reference
+                   note = "Reference dataset successfully changed."
+              elif selected_reference in obs_dict:
+                   reference_dataset = selected_reference
+                   note = "Reference dataset successfully changed."
+              else:
+                   note = "Reference dataset did not change."
+
+         if option == '6':
+              screen.addstr(25, x/2, "Model/s:")
+              for each in enumerate(models_dict):
+                   screen.addstr(26 + each[0], x/2 + 2, "{0}: {1}".format(each[1], models_dict[each[1]]['directory'].split("/")[-1]))
+              screen.addstr(26 + len(models_dict), x/2, "Observation/s:")
+              for each in enumerate(obs_dict):
+                   screen.addstr(27 + len(models_dict) + each[0], x/2 + 2, "{0}: {1} - ({2})".format(each[1], obs_dict[each[1]]['database'], obs_dict[each[1]]['unit']))
+              screen.addstr(27 + len(obs_dict) + len(models_dict), x/2, "Please enter target dataset/s (comma separated for multi target):")
+              selected_target = screen.getstr()
+              selected_target = selected_target.split(",")
+              if selected_target != ['']:
+                   target_datasets = []
+                   for target in selected_target:
+                        if target in models_dict:
+                             target_datasets.append(target)
+                             note = "Target dataset successfully changed."
+                        elif target in obs_dict:
+                             target_datasets.append(target)
+                             note = "Target dataset successfully changed."
+                        else:
+                             note = "Target dataset did not change."
+
+         if option == '7':
+              screen.addstr(25, x/2, "Available metrics:")
+              for i in enumerate(sorted(metrics_dict, key=metrics_dict.get)):
+                   screen.addstr(26 + i[0], x/2 + 2, "[{0}] - {1}".format(i[1], metrics_dict[i[1]]))
+              screen.addstr(26 + len(metrics_dict), x/2, "Please select a metric:")
+              metric_id = screen.getstr()
+              if metric_id in metrics_dict:
+                   metric = metrics_dict[metric_id]
+                   note = "Metric successfully changed to {0}".format(metric)
+              else:
+                   note = "Metric has not changed."
+
+         if option == '8':
+              screen.addstr(25, x/2, "Please enter working directory path:")
+              new_working_directory = screen.getstr()
+              if new_working_directory:
+                   if new_working_directory[-1] != '/':
+                        new_working_directory = new_working_directory + "/"
+                   working_directory = new_working_directory
+                   note = "Working directory has changed successfully."
+              else:
+                   note = "Working directory has not changed."
+
+         # The plot-title prompt is disabled for now; menu option '9' is used
+         # below to show temporal boundaries instead.
+         #screen.addstr(25, x/2, "Please enter plot title:")
+         #plot_title = screen.getstr()
+
+         if option == '9':
+              models_start_time, models_end_time = get_models_temp_bound()
+              line = 25
+              for i, model in enumerate(model_datasets):
+                   model_name = models_info[i]['directory'].split("/")[-1]
+                   line += 1
+                   screen.addstr(line, x/2, "{0}".format(model_name))
+                   line += 1
+                   screen.addstr(line, x/2 + 3, "Start:{0} - End:{1}".format(models_start_time[i], models_end_time[i]))
+
+              observations_start_time, observations_end_time = get_obs_temp_bound()
+              for i, obs in enumerate(observations_info):
+                   line += 1
+                   screen.addstr(line, x/2, "{0}".format(observations_info[i]['database']))
+                   line += 1
+                   screen.addstr(line, x/2 + 3, "Start:{0} - End:{1}".format(observations_start_time[i], observations_end_time[i]))
+              screen.getstr()
+
+         if option == '10':
+              models_bound = get_models_spatial_bound()
+              line = 25
+              for i, model in enumerate(model_datasets):
+                   model_name = models_info[i]['directory'].split("/")[-1]
+                   line += 1
+                   screen.addstr(line, x/2, "{0}".format(model_name))
+                   line += 1
+                   screen.addstr(line, x/2 + 3, "{0}".format(models_bound[i]))
+
+              observations_bound = get_obs_spatial_bound()
+              for i, obs in enumerate(observations_info):
+                   line += 1
+                   screen.addstr(line, x/2, "{0}".format(observations_info[i]['database']))
+                   line += 1
+                   screen.addstr(line, x/2 + 3, "{0}".format(observations_bound[i]))
+              screen.getstr()
+
+         if option.lower() == 'r':
+              note = run_screen(model_datasets, models_info, observations_info, all_overlap_start_time, all_overlap_end_time, \
+                         all_overlap_min_lat, all_overlap_max_lat, all_overlap_min_lon, all_overlap_max_lon, \
+                         temp_grid_setting, spatial_grid_setting_lat, spatial_grid_setting_lon, reference_dataset, target_datasets, metric, working_directory, plot_title)
+
+
+##############################################################
+#     Main Menu Screen
+##############################################################
+
+def main_menu(model_datasets, models_info, observation_datasets, observations_info, note=""):
+    '''Generates the main menu page.
+
+    :param model_datasets: list of model dataset objects
+    :type model_datasets: list
+    :param models_info: list of dictionaries that contain information for each model
+    :type models_info: list
+    :param observation_datasets: list of observation dataset objects
+    :type observation_datasets: list
+    :param observations_info: list of dictionaries that contain information for each observation
+    :type observations_info: list
+    '''
+
+    option = ''
+    while option != '0':
+         ready_screen("main_menu", note)
+         model_status = "NC" if len(model_datasets) == 0 else "C"     #NC (Not Complete), if there is no model added, C (Complete) if model is added
+         obs_status = "NC" if len(observations_info) == 0 else "C"    #NC (Not Complete), if there is no observation added, C (Complete) if observation is added
+         screen.addstr(1, 1, "Main Menu:")
+         screen.addstr(4, 4, "1 - Manage Model ({0})".format(model_status))
+         screen.addstr(6, 4, "2 - Manage Observation ({0})".format(obs_status))
+         screen.addstr(8, 4, "3 - Run")
+         screen.addstr(10, 4, "0 - EXIT")
+         screen.addstr(16, 2, "Select an option: ")
+         screen.refresh()
+         option = screen.getstr()
+
+         if option == '1':
+              header = "Main Menu > Manage Model"
+              manage_model_screen(header)
+         if option == '2':
+              header = "Main Menu > Manage Observation"
+              manage_obs_screen(header)
+         if option == '3':
+              if model_status == 'NC' or obs_status == 'NC':
+                   main_menu(model_datasets, models_info, observation_datasets, observations_info, note="WARNING: Please complete steps 1 and 2 before 3.")
+              else:
+                   header = "Main Menu > Run"
+                   settings_screen(header)
+    curses.endwin()
+    sys.exit()
+
+
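+# Running this module directly launches the curses-based CLI, so it has to be
+# started from a real terminal (curses.initscr() fails otherwise). A typical
+# invocation, assuming the shell's working directory contains this file, is:
+#
+#     python cli_app.py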
+if __name__ == '__main__':
+     TITLE = "RCMES CLI"
+     ORGANIZATION = "JPL/NASA - JIFRESSE/UCLA"
+     screen = curses.initscr()
+     model_datasets = []           #list of model dataset objects
+     models_info = []              #list of dictionaries that contain information for each model
+     observation_datasets = []     #list of observation dataset objects
+     observations_info = []        #list of dictionaries that contain information for each observation
+     main_menu(model_datasets, models_info, observation_datasets, observations_info)

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
new file mode 100644
index 0000000..eb4b4c5
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_clt_MAR-SEP.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 3
+    month_end: 9
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: cld_frac
+        multiplying_factor: 100.0
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/clt*.nc                                                    
+        variable: clt     
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_clt_MAR-SEP_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+
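+# Usage note (the driver script name is assumed here and depends on the revised
+# RCMES layout): pass this file to the configuration runner, for example
+#     python run_RCMES.py cordex-arctic_cloud_fraction_bias_to_SRB.yaml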

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
new file mode 100644
index 0000000..1311843
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlds_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rlds_JUL.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
+        variable: lw_sfc_dn
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rlds*.nc                                                    
+        variable: rlds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rlds_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [1,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
new file mode 100644
index 0000000..b03738a
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rlus_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rlus_JUL.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
+        variable: lw_sfc_up
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rlus*.nc                                                    
+        variable: rlus    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rlus_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
new file mode 100644
index 0000000..9613e46
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-ARCTIC/cordex-arctic_rsds_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rsds_JUL.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: sw_sfc_dn
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rsds*.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rsds_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
new file mode 100644
index 0000000..0650e61
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig10_and_Fig11.yaml
@@ -0,0 +1,81 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_monthly_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Timeseries_plot_subregion_annual_cycle
+
+plots1:
+    file_name: Fig10
+    subplots_array: !!python/tuple [7,2]
+
+metrics2: Portrait_diagram_subregion_annual_cycle                
+
+plots2:
+    file_name: Fig11
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]
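The subregion boundaries above follow the [south, north, west, east] convention stated in the comment. Below is a simplified, hypothetical sketch of what averaging a field over one such box looks like; the helper name, the synthetic grid, and the lack of latitude weighting are simplifications, and RCMES itself performs this step with ocw.utils.calc_subregion_area_mean_and_std.

import numpy as np
import numpy.ma as ma

def subregion_mean(values, lats, lons, bounds):
    # values: masked array shaped (time, lat, lon); bounds: [south, north, west, east]
    south, north, west, east = bounds
    lon2d, lat2d = np.meshgrid(lons, lats)
    inside = (lat2d >= south) & (lat2d <= north) & (lon2d >= west) & (lon2d <= east)
    # unweighted average over the grid points inside the box, one value per time step
    return ma.array([values[t][inside].mean() for t in range(values.shape[0])])

# Example with synthetic data and subregion R01 from the file above
lats = np.arange(23.75, 50.0, 0.5)
lons = np.arange(-125.75, -66.5, 0.5)
field = ma.masked_invalid(np.random.rand(12, lats.size, lons.size))
r01_series = subregion_mean(field, lats, lons, [42.75, 49.75, -123.75, -120.25])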

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml b/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
new file mode 100644
index 0000000..f11c136
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_paper/Fig12_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_JJA_mean_1980-2003.nc
+
+# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig12_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]


[2/7] climate git commit: CLIMATE-720 - Revise file structure

Posted by hu...@apache.org.
http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
deleted file mode 100644
index eb4b4c5..0000000
--- a/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_clt_MAR-SEP.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 3
-    month_end: 9
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: cld_frac
-        multiplying_factor: 100.0
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/clt*.nc                                                    
-        variable: clt     
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_clt_MAR-SEP_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
deleted file mode 100644
index 1311843..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlds*.nc                                                    
-        variable: rlds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [1,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
deleted file mode 100644
index b03738a..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlus_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_up
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlus*.nc                                                    
-        variable: rlus    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlus_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
deleted file mode 100644
index 9613e46..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rsds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: sw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rsds*.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rsds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml b/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
deleted file mode 100644
index 9483cae..0000000
--- a/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
+++ /dev/null
@@ -1,90 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex_AF_prec_monthly_mean_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1998-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU  
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*pr.nc                                
-        variable: pr    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Timeseries_plot_subregion_annual_cycle
-
-plots1:
-    file_name: cordex_AF_prec_subregion_annual_cycle_time_series
-    subplots_array: !!python/tuple [7,3]
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01:
-      [29.0, 36.5, -10.0, 0.0]
-    R02:
-      [29, 37.5, 0, 10]
-    R03:
-      [25, 32.5, 10, 20]
-    R04:
-      [25, 32.5, 20, 33]
-    R05:
-      [12, 20.0, -19.3, -10.2]
-    R06:
-      [15, 25.0, 15, 30]
-    R07:
-      [7.3, 15,  -10, 10]
-    R08:
-      [5, 7.3,  -10, 10]
-    R09:
-      [6.9, 15, 33.9, 40]
-    R10:
-      [2.2, 11.8, 44.2, 51.8]
-    R11:
-      [0, 10, 10, 25]
-    R12:
-      [-10, 0, 10, 25]
-    R13:
-      [-15, 0, 30, 40]
-    R14:
-      [-27.9, -21.4, 13.6, 20]
-    R15:
-      [-35, -27.9, 13.6, 20]
-    R16:
-      [-35, -21.4, 20, 35.7]
-    R17:
-      [-25.8, -11.7, 43.2, 50.3]
-    R18:
-      [25, 35.0, 33, 40]
-    R19:
-      [28, 35, 45, 50]
-    R20:
-      [13, 20.0, 43, 50]
-    R21:
-      [20, 27.5, 50, 58]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/metrics_and_plots.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/metrics_and_plots.py b/examples/configuration_file_examples/metrics_and_plots.py
deleted file mode 100644
index 6e00b0f..0000000
--- a/examples/configuration_file_examples/metrics_and_plots.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#Apache OCW lib imports
-import ocw.dataset as ds
-import ocw.data_source.local as local
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.evaluation import Evaluation
-import ocw.metrics as metrics
-
-# Python libraries
-import numpy as np
-import numpy.ma as ma
-import matplotlib.pyplot as plt
-from mpl_toolkits.basemap import Basemap 
-from matplotlib import rcParams
-from matplotlib.patches import Polygon
-import string
-
-def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name, row, column, map_projection=None):
-    '''Draw maps of observed multi-year climatology and biases of models'''
-
-    # calculate climatology of observation data
-    obs_clim = utils.calc_temporal_mean(obs_dataset)
-    # determine the metrics
-    map_of_bias = metrics.TemporalMeanBias()
-
-    # create the Evaluation object
-    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
-                                 model_datasets, # list of target datasets for the evaluation
-                                 [map_of_bias, map_of_bias])
-    # run the evaluation (bias calculation)
-    bias_evaluation.run() 
-
-    rcm_bias = bias_evaluation.results[0]
-
-    fig = plt.figure()
-
-    lat_min = obs_dataset.lats.min()
-    lat_max = obs_dataset.lats.max()
-    lon_min = obs_dataset.lons.min()
-    lon_max = obs_dataset.lons.max()
-
-    string_list = list(string.ascii_lowercase) 
-    ax = fig.add_subplot(row,column,1)
-    if map_projection == 'npstere':
-        m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
-            resolution = 'l', fix_aspect=False)
-    else:
-        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
-
-    x,y = m(lons, lats)
-
-    m.drawcoastlines(linewidth=1)
-    m.drawcountries(linewidth=1)
-    m.drawstates(linewidth=0.5, color='w')
-    max = m.contourf(x,y,obs_clim,levels = plotter._nice_intervals(obs_dataset.values, 10), extend='both',cmap='rainbow')
-    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
-    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
-    plt.colorbar(max, cax = cax) 
-    clevs = plotter._nice_intervals(rcm_bias, 11)
-    for imodel in np.arange(len(model_datasets)):
-
-        ax = fig.add_subplot(row, column,2+imodel)
-        if map_projection == 'npstere':
-            m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
-                resolution = 'l', fix_aspect=False)
-        else:
-            m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-        m.drawcoastlines(linewidth=1)
-        m.drawcountries(linewidth=1)
-        m.drawstates(linewidth=0.5, color='w')
-        max = m.contourf(x,y,rcm_bias[imodel,:],levels = clevs, extend='both', cmap='RdBu_r')
-        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
-
-    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
-    plt.colorbar(max, cax = cax) 
-
-    plt.subplots_adjust(hspace=0.01,wspace=0.05)
-
-    fig.savefig(file_name,dpi=600,bbox_inches='tight')
-
-def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name):
-
-    # calculate climatological mean fields
-    obs_clim_dataset = ds.Dataset(obs_dataset.lats, obs_dataset.lons, obs_dataset.times, utils.calc_temporal_mean(obs_dataset))
-    model_clim_datasets = []
-    for dataset in model_datasets:
-        model_clim_datasets.append(ds.Dataset(dataset.lats, dataset.lons, dataset.times, utils.calc_temporal_mean(dataset)))
-
-    # Metrics (spatial standard deviation and pattern correlation)
-    # determine the metrics
-    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
-
-    # create the Evaluation object
-    taylor_evaluation = Evaluation(obs_clim_dataset, # Climatological mean of reference dataset for the evaluation
-                                 model_clim_datasets, # list of climatological means from model datasets for the evaluation
-                                 [taylor_diagram])
-
-    # run the evaluation (bias calculation)
-    taylor_evaluation.run() 
-
-    taylor_data = taylor_evaluation.results[0]
-
-    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right',frameon=False)
-
-def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle, 
-                          file_name, row, column, x_tick=['']):
-
-    nmodel, nt, nregion = model_subregion_mean.shape  
-
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-        
-    x_axis = np.arange(nt)
-    x_tick_values = x_axis
-
-    fig = plt.figure()
-    rcParams['xtick.labelsize'] = 6
-    rcParams['ytick.labelsize'] = 6
-  
-    for iregion in np.arange(nregion):
-        ax = fig.add_subplot(row, column, iregion+1) 
-        x_tick_labels = ['']
-        if iregion+1  > column*(row-1):
-            x_tick_labels = x_tick 
-        else:
-            x_tick_labels=['']
-        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
-        for imodel in np.arange(nmodel):
-            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
-        ax.set_xlim([-0.5,nt-0.5])
-        ax.set_xticks(x_tick_values)
-        ax.set_xticklabels(x_tick_labels)
-        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
-    
-    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center' , prop={'size':7}, frameon=False)  
-
-    fig.subplots_adjust(hspace=0.7, wspace=0.5)
-    fig.savefig(file_name, dpi=600, bbox_inches='tight')
-
-def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
-                               file_name, normalize=True):
-
-    nmodel, nt, nregion = model_subregion_mean.shape
-    
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-
-    subregion_metrics = ma.zeros([4, nregion, nmodel])
-
-    for imodel in np.arange(nmodel):
-        for iregion in np.arange(nregion):
-            # First metric: bias
-            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time = True)
-            # Second metric: standard deviation
-            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Third metric: RMSE
-            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Fourth metric: correlation
-            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-   
-    if normalize:
-        for iregion in np.arange(nregion):
-            subregion_metrics[0, iregion, : ] = subregion_metrics[0, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-            subregion_metrics[1, iregion, : ] = subregion_metrics[1, iregion, : ]*100. 
-            subregion_metrics[2, iregion, : ] = subregion_metrics[2, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-
-    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
-
-    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
-        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric, 
-                                      xlabel='model',ylabel='region')             
-
-def Map_plot_subregion(subregions, ref_dataset, directory):
-  
-    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
-    fig = plt.figure()
-    ax = fig.add_subplot(111)
-    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
-                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
-    m.drawcoastlines(linewidth=0.75)
-    m.drawcountries(linewidth=0.75)
-    m.etopo()  
-    x, y = m(lons, lats) 
-    #subregion_array = ma.masked_equal(subregion_array, 0)
-    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
-    for subregion in subregions:
-        draw_screen_poly(subregion[1], m, 'w') 
-        plt.annotate(subregion[0],xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center',va='center', fontsize=8) 
-    fig.savefig(directory+'map_subregion', bbox_inches='tight')
-
-def draw_screen_poly(boundary_array, m, linecolor='k'):
-
-    ''' Draw a polygon on a map
-
-    :param boundary_array: [lat_south, lat_north, lon_west, lon_east]
-    :param m: Basemap object
-    '''
-
-    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
-    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
-    x, y = m( lons, lats )
-    xy = zip(x,y)
-    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
-    plt.gca().add_patch(poly)
-    
-    
-   
-
-    
-
-    
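A small self-contained illustration, with synthetic numbers, of the reshape-and-average step used by Time_series_subregion and Portrait_diagram_subregion above: a monthly series of length nt is folded into (years, 12) and averaged across years to give the mean annual cycle.

import numpy as np
import numpy.ma as ma

nyears, nregion = 24, 14                 # e.g. 1980-2003 and 14 NARCCAP subregions
nt = nyears * 12
# shape (dataset, time, region), as produced by calc_subregion_area_mean_and_std
monthly = ma.masked_invalid(np.random.rand(1, nt, nregion))

annual_cycle = ma.mean(monthly.reshape([1, nyears, 12, nregion]), axis=1)
assert annual_cycle.shape == (1, 12, nregion)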

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml b/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
deleted file mode 100644
index c6b96cf..0000000
--- a/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml b/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
deleted file mode 100644
index de2d98e..0000000
--- a/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_DJF_mean_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ./data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: narccap_tas_DJF_subregion_interannual_variability_portrait_diagram
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/run_RCMES.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/run_RCMES.py b/examples/configuration_file_examples/run_RCMES.py
deleted file mode 100644
index 1054446..0000000
--- a/examples/configuration_file_examples/run_RCMES.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#Apache OCW lib imports
-import ocw.dataset_processor as dsp
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.dataset import Bounds
-
-import matplotlib.pyplot as plt
-from matplotlib import rcParams
-import numpy as np
-import numpy.ma as ma
-import yaml
-from glob import glob
-import operator
-from dateutil import parser
-from datetime import datetime
-import os
-import sys
-
-from metrics_and_plots import *
-
-import ssl
-if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-
-config_file = str(sys.argv[1])
-
-print 'Reading the configuration file ', config_file
-config = yaml.load(open(config_file))
-time_info = config['time']
-temporal_resolution = time_info['temporal_resolution']
-
-start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
-end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
-
-space_info = config['space']
-min_lat = space_info['min_lat']
-max_lat = space_info['max_lat']
-min_lon = space_info['min_lon']
-max_lon = space_info['max_lon']
-
-""" Step 1: Load the reference data """
-ref_data_info = config['datasets']['reference']
-print 'Loading observation dataset:\n',ref_data_info
-ref_name = ref_data_info['data_name']
-if ref_data_info['data_source'] == 'local':
-    ref_dataset = local.load_file(ref_data_info['path'],
-                                  ref_data_info['variable'], name=ref_name)
-elif ref_data_info['data_source'] == 'rcmed':
-      ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
-                                            ref_data_info['parameter_id'],
-                                            min_lat, max_lat, min_lon, max_lon,
-                                            start_time, end_time)
-else:
-    print ' '
-    # TO DO: support ESGF
-
-ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
-if 'multiplying_factor' in ref_data_info.keys():
-    ref_dataset.values = ref_dataset.values*ref_data_info['multiplying_factor']
-
-""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
-model_data_info = config['datasets']['targets']
-print 'Loading model datasets:\n',model_data_info
-if model_data_info['data_source'] == 'local':
-    model_datasets, model_names = local.load_multiple_files(file_path = model_data_info['path'],
-                                                            variable_name =model_data_info['variable'])
-else:
-    print ' '
-    # TO DO: support RCMED and ESGF
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
-
-""" Step 3: Subset the data for temporal and spatial domain """
-# Create a Bounds object to use for subsetting
-if time_info['maximum_overlap_period']:
-    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
-    print 'Maximum overlap period'
-    print 'start_time:', start_time
-    print 'end_time:', end_time
-
-if temporal_resolution == 'monthly' and end_time.day !=1:
-    end_time = end_time.replace(day=1)
-if ref_data_info['data_source'] == 'rcmed':
-    min_lat = np.max([min_lat, ref_dataset.lats.min()])
-    max_lat = np.min([max_lat, ref_dataset.lats.max()])
-    min_lon = np.max([min_lon, ref_dataset.lons.min()])
-    max_lon = np.min([max_lon, ref_dataset.lons.max()])
-bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-
-if ref_dataset.lats.ndim !=2 and ref_dataset.lons.ndim !=2:
-    ref_dataset = dsp.subset(bounds,ref_dataset)
-else:
-    ref_dataset = dsp.temporal_slice(bounds.start, bounds.end, ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    if dataset.lats.ndim !=2 and dataset.lons.ndim !=2:
-        model_datasets[idata] = dsp.subset(bounds,dataset)
-    else:
-        model_datasets[idata] = dsp.temporal_slice(bounds.start, bounds.end, dataset)
-
-# Temporally subset both observation and model datasets for the user-specified season
-month_start = time_info['month_start']
-month_end = time_info['month_end']
-average_each_year = time_info['average_each_year']
-
-ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
-
-# generate grid points for regridding
-if config['regrid']['regrid_on_reference']:
-    new_lat = ref_dataset.lats
-    new_lon = ref_dataset.lons 
-else:
-    delta_lat = config['regrid']['regrid_dlat']
-    delta_lon = config['regrid']['regrid_dlon']
-    nlat = (max_lat - min_lat)/delta_lat+1
-    nlon = (max_lon - min_lon)/delta_lon+1
-    new_lat = np.linspace(min_lat, max_lat, nlat)
-    new_lon = np.linspace(min_lon, max_lon, nlon)
-
-# number of models
-nmodel = len(model_datasets)
-print 'Dataset loading completed'
-print 'Observation data:', ref_name 
-print 'Number of model datasets:',nmodel
-for model_name in model_names:
-    print model_name
-
-""" Step 4: Spatial regriding of the reference datasets """
-print 'Regridding datasets: ', config['regrid']
-if not config['regrid']['regrid_on_reference']:
-    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
-    print 'Reference dataset has been regridded'
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
-    print model_names[idata]+' has been regridded'
-
-print 'Propagating missing data information'
-ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0]
-model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:]
-
-""" Step 5: Checking and converting variable units """
-print 'Checking and converting variable units'
-ref_dataset = dsp.variable_unit_conversion(ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
-    
-
-print 'Generating multi-model ensemble'
-if len(model_datasets) >= 2.:
-    model_datasets.append(dsp.ensemble(model_datasets))
-    model_names.append('ENS')
-
-""" Step 6: Generate subregion average and standard deviation """
-if config['use_subregions']:
-    # sort the subregion by region names and make a list
-    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
-
-    # number of subregions
-    nsubregion = len(subregions)
-
-    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
-
-    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions) 
-    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions) 
-
-""" Step 7: Write a netCDF file """
-workdir = config['workdir']
-if workdir[-1] != '/':
-    workdir = workdir+'/'
-print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
-if not os.path.exists(workdir):
-    os.system("mkdir "+workdir)
-
-if config['use_subregions']:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'],
-                                                       subregions=subregions, subregion_array = subregion_array, 
-                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
-                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
-else:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'])
-
-""" Step 8: Calculate metrics and draw plots """
-nmetrics = config['number_of_metrics_and_plots']
-if config['use_subregions']:
-    Map_plot_subregion(subregions, ref_dataset, workdir)
-
-if nmetrics > 0:
-    print 'Calculating metrics and generating plots'
-    for imetric in np.arange(nmetrics)+1:
-        metrics_name = config['metrics'+'%1d' %imetric]
-        plot_info = config['plots'+'%1d' %imetric]
-        file_name = workdir+plot_info['file_name']
-
-        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
-        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
-            row, column = plot_info['subplots_array']
-            if 'map_projection' in plot_info.keys():
-                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                          file_name, row, column, map_projection=plot_info['map_projection'])
-            else:
-                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                          file_name, row, column)
-        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
-            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                      file_name)
-        elif config['use_subregions']:
-            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
-            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
-            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name)
-            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name)
-        else:
-            print 'please check the currently supported metrics'
-
-
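The script above takes the configuration path from sys.argv[1], so a run looks like python run_RCMES.py followed by one of the YAML files in this commit. As a quick sanity check of the Step 4 target-grid arithmetic, here are the NARCCAP space and regrid values from the configuration files above (the script itself passes the float count directly to np.linspace, which older NumPy accepted):

import numpy as np

min_lat, max_lat, delta_lat = 23.75, 49.75, 0.50
min_lon, max_lon, delta_lon = -125.75, -66.75, 0.50

nlat = int((max_lat - min_lat) / delta_lat + 1)   # 53 latitude points
nlon = int((max_lon - min_lon) / delta_lon + 1)   # 119 longitude points

new_lat = np.linspace(min_lat, max_lat, nlat)     # 0.5-degree spacing, inclusive ends
new_lon = np.linspace(min_lon, max_lon, nlon)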

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/statistical_downscaling/MPI_tas_JJA.yaml
----------------------------------------------------------------------
diff --git a/examples/statistical_downscaling/MPI_tas_JJA.yaml b/examples/statistical_downscaling/MPI_tas_JJA.yaml
deleted file mode 100644
index 17a12a7..0000000
--- a/examples/statistical_downscaling/MPI_tas_JJA.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-case_name: MPI_tas_JJA
-
-# downscaling method (1: delta addition, 2: Delta correction, 3: quantile mapping, 4: asynchronous regression)
-downscaling_option: 4
-
-# longitude (-180 ~ 180) and latitude (-90 ~ 90) of the grid point to downscale model output [in degrees]
-location:
-    name: HoChiMinh_City 
-    grid_lat: 10.75    
-    grid_lon: 106.67   
-
-# Season (for December - February, month_start=12 & month_end =2; for June - August, month_start=6 & month_end = 8)
-month_index: !!python/tuple [6,7,8]
-
-# reference (observation) data
-reference:
-    data_source: local
-    data_name: CRU
-    path: ./data/observation/tas_cru_monthly_1981-2010.nc
-    variable: tas
-
-model:
-    data_name: MPI
-    variable: tas
-    present:
-        path: ./data/model_present/tas_Amon_MPI_decadal1980_198101-201012.nc
-    future:
-        scenario_name: RCP8.5_2041-70
-        path: ./data/model_rcp85/tas_Amon_MPI_rcp85_204101-207012.nc 
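A small sketch of how the downscaling_option integer above selects a method; the mapping follows the comment in this file, and the method names match the ocw.statistical_downscaling calls in run_statistical_downscaling.py below.

import yaml

config = yaml.load(open('MPI_tas_JJA.yaml'))       # file name as in this example

method_names = {1: 'Delta_addition',
                2: 'Delta_correction',
                3: 'Quantile_mapping',
                4: 'Asynchronous_regression'}

print(method_names[config['downscaling_option']])  # Asynchronous_regression for option 4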

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/statistical_downscaling/run_statistical_downscaling.py
----------------------------------------------------------------------
diff --git a/examples/statistical_downscaling/run_statistical_downscaling.py b/examples/statistical_downscaling/run_statistical_downscaling.py
deleted file mode 100644
index 60c6ac2..0000000
--- a/examples/statistical_downscaling/run_statistical_downscaling.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import yaml
-import os
-import sys
-import xlwt
-
-import numpy as np
-import numpy.ma as ma
-
-import ocw.data_source.local as local
-import ocw.dataset as ds
-import ocw.dataset_processor as dsp
-import ocw.statistical_downscaling as down
-import ocw.plotter as plotter
-
-import ssl
-
-def spatial_aggregation(target_dataset, lon_min, lon_max, lat_min, lat_max):
-    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
-    :param target_dataset: Dataset object that needs spatial subsetting
-    :type target_dataset: Open Climate Workbench Dataset Object
-    :param lon_min: minimum longitude (western boundary)
-    :type lon_min: float
-    :param lon_max: maximum longitude (eastern boundary)
-    :type lon_min: float
-    :param lat_min: minimum latitude (southern boundary) 
-    :type lat_min: float
-    :param lat_min: maximum latitude (northern boundary) 
-    :type lat_min: float
-    :returns: A new spatially subset Dataset
-    :rtype: Open Climate Workbench Dataset Object
-    """
-
-    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
-        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
-    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
-        new_lon = target_dataset.lons
-        new_lat = target_dataset.lats
- 
-    y_index, x_index = np.where((new_lon >= lon_min) & (new_lon <= lon_max) & (new_lat >= lat_min) & (new_lat <= lat_max))[0:2]
-
-    #new_dataset = ds.Dataset(target_dataset.lats[y_index.min():y_index.max()+1],
-    #                         target_dataset.lons[x_index.min():x_index.max()+1],
-    #                         target_dataset.times,
-    #                         target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1],
-    #                         target_dataset.variable,
-    #                         target_dataset.name) 
-    return target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1]
-
-def extract_data_at_nearest_grid_point(target_dataset, longitude, latitude):
-    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
-    :param target_dataset: Dataset object that needs spatial subsetting
-    :type target_dataset: Open Climate Workbench Dataset Object
-    :type longitude: float
-    :param longitude: longitude
-    :type latitude: float
-    :param latitude: latitude 
-    :returns: A new spatially subset Dataset
-    :rtype: Open Climate Workbench Dataset Object
-    """
-
-    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
-        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
-    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
-        new_lon = target_dataset.lons
-        new_lat = target_dataset.lats
-    distance = (new_lon - longitude)**2. + (new_lat - latitude)**2.
-    y_index, x_index = np.where(distance == np.min(distance))[0:2]
-
-    return target_dataset.values[:,y_index[0], x_index[0]]
-
-if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-
-config_file = str(sys.argv[1])
-
-print 'Reading the configuration file ', config_file
-
-config = yaml.load(open(config_file))
-
-case_name = config['case_name']
-
-downscale_option_names = [' ','delta_addition','delta_correction','quantile_mapping','asynchronous_regression']
-DOWNSCALE_OPTION = config['downscaling_option']
-
-location = config['location']
-grid_lat = location['grid_lat']
-grid_lon = location['grid_lon']
-
-month_index = config['month_index']
-month_start = month_index[0]
-month_end = month_index[-1]    
-
-ref_info = config['reference']
-model_info = config['model']
-
-# Filename for the output data/plot (without file extension)
-OUTPUT = "%s_%s_%s_%s_%s" %(location['name'], ref_info['variable'], model_info['data_name'], ref_info['data_name'],model_info['future']['scenario_name'])
-
-print("Processing "+ ref_info['data_name'] + "  data")
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-
-print("Loading %s into an OCW Dataset Object" % (ref_info['path'],))
-ref_dataset = local.load_file(ref_info['path'], ref_info['variable'])
-print(ref_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (ref_dataset.values.shape,))
-
-print("Loading %s into an OCW Dataset Object" % (model_info['present']['path'],))
-model_dataset_present = local.load_file(model_info['present']['path'], model_info['variable'])
-print(model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_present.values.shape,))
-dy = model_dataset_present.spatial_resolution()[0]
-dx = model_dataset_present.spatial_resolution()[1]
-
-model_dataset_future = local.load_file(model_info['future']['path'], model_info['variable'])
-print(model_info['future']['scenario_name']+':'+model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_future.values.shape,))
-
-""" Step 2: Temporal subsetting """
-print("Temporal subsetting for the selected month(s)")
-ref_temporal_subset = dsp.temporal_subset(month_start, month_end, ref_dataset)
-model_temporal_subset_present = dsp.temporal_subset(month_start, month_end, model_dataset_present)
-model_temporal_subset_future = dsp.temporal_subset(month_start, month_end, model_dataset_future)
-
-""" Step 3: Spatial aggregation of observational data into the model grid """
-print("Spatial aggregation of observational data near latitude %0.2f and longitude %0.2f " % (grid_lat, grid_lon))
-# There are two options to aggregate observational data near a model grid point
-#ref_subset = spatial_aggregation(ref_temporal_subset, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-#model_subset_present = spatial_aggregation(model_temporal_subset_present, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-#model_subset_future = spatial_aggregation(model_temporal_subset_future, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-ref_subset = extract_data_at_nearest_grid_point(ref_temporal_subset, grid_lon, grid_lat)
-model_subset_present = extract_data_at_nearest_grid_point(model_temporal_subset_present, grid_lon, grid_lat)
-model_subset_future = extract_data_at_nearest_grid_point(model_temporal_subset_future, grid_lon, grid_lat)
-
-
-""" Step 4:  Create a statistical downscaling object and downscaling model output """
-# You can add other methods
-print("Creating a statistical downscaling object")
-
-downscale = down.Downscaling(ref_subset, model_subset_present, model_subset_future)
-
-print(downscale_option_names[DOWNSCALE_OPTION]+": Downscaling model output")
-
-if DOWNSCALE_OPTION == 1:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_addition()
-elif DOWNSCALE_OPTION == 2:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_correction()
-elif DOWNSCALE_OPTION == 3:
-    downscaled_model_present, downscaled_model_future = downscale.Quantile_mapping()
-elif DOWNSCALE_OPTION == 4:
-    downscaled_model_present, downscaled_model_future = downscale.Asynchronous_regression()
-else:
-    sys.exit("DOWNSCALE_OPTION must be an integer between 1 and 4")
-
-
-""" Step 5: Create plots and spreadsheet """
-print("Plotting results")
-if not os.path.exists(case_name):
-    os.system("mkdir "+case_name)
-os.chdir(os.getcwd()+"/"+case_name)
-
-plotter.draw_marker_on_map(grid_lat, grid_lon, fname='downscaling_location', location_name=config['location']['name'])
-
-plotter.draw_histogram([ref_subset.ravel(), model_subset_present.ravel(), model_subset_future.ravel()], 
-                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
-                       fname=OUTPUT+'_original')
-                        
-plotter.draw_histogram([ref_subset.ravel(), downscaled_model_present, downscaled_model_future], 
-                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
-                       fname=OUTPUT+'_downscaled_using_'+downscale_option_names[DOWNSCALE_OPTION])
-
-print("Generating spreadsheet")
-
-workbook = xlwt.Workbook()
-sheet = workbook.add_sheet(downscale_option_names[config['downscaling_option']])
-
-sheet.write(0, 0, config['location']['name'])
-sheet.write(0, 2, 'longitude')
-sheet.write(0, 4, 'latitude')
-sheet.write(0, 6, 'month')
-
-
-sheet.write(0, 3, grid_lon)
-sheet.write(0, 5, grid_lat)
-
-
-
-for imonth,month in enumerate(month_index):
-    sheet.write(0, 7+imonth, month)
-
-sheet.write(3, 1, 'observation')
-sheet.write(4, 1, ref_info['data_name'])
-for idata, data in enumerate(ref_subset.ravel()[~ref_subset.ravel().mask]):
-    sheet.write(5+idata,1,data.item())
-
-sheet.write(3, 2, 'original')
-sheet.write(4, 2, model_info['data_name'])
-for idata, data in enumerate(model_subset_present.ravel()):
-    sheet.write(5+idata,2,data.item())
-
-sheet.write(3, 3, 'original')
-sheet.write(4, 3, model_info['future']['scenario_name'])
-for idata, data in enumerate(model_subset_future.ravel()):
-    sheet.write(5+idata,3,data.item())
-
-sheet.write(3, 4, 'downscaled')
-sheet.write(4, 4, model_info['data_name'])
-for idata, data in enumerate(downscaled_model_present):
-    sheet.write(5+idata,4,data.item())
-
-sheet.write(3, 5, 'downscaled')
-sheet.write(4, 5, model_info['future']['scenario_name'])
-for idata, data in enumerate(downscaled_model_future):
-    sheet.write(5+idata,5,data.item())
-
-workbook.save(OUTPUT+'.xls')
-
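extract_data_at_nearest_grid_point above picks the cell that minimizes a simple squared-degree distance rather than a great-circle distance. A self-contained check of that selection, using the Ho Chi Minh City point from MPI_tas_JJA.yaml and a hypothetical 2.5-degree global grid:

import numpy as np

grid_lat, grid_lon = 10.75, 106.67                 # location from MPI_tas_JJA.yaml
lats = np.arange(-90.0, 90.1, 2.5)                 # hypothetical model grid
lons = np.arange(0.0, 360.0, 2.5)
lon2d, lat2d = np.meshgrid(lons, lats)

distance = (lon2d - grid_lon) ** 2 + (lat2d - grid_lat) ** 2
j, i = np.unravel_index(np.argmin(distance), distance.shape)
print('%0.2f %0.2f' % (lat2d[j, i], lon2d[j, i]))  # nearest cell: 10.00 107.50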


[5/7] climate git commit: Folder names have been changed

Posted by hu...@apache.org.
Folder names have been changed


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/43cdfd69
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/43cdfd69
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/43cdfd69

Branch: refs/heads/master
Commit: 43cdfd699e50b130cbc7e55208144362e87fb206
Parents: c6c9dd1
Author: huikyole <hu...@jpl.nasa.gov>
Authored: Thu Jan 21 13:06:39 2016 -0800
Committer: huikyole <hu...@jpl.nasa.gov>
Committed: Thu Jan 21 13:06:39 2016 -0800

----------------------------------------------------------------------
 ...ordex-arctic_cloud_fraction_bias_to_SRB.yaml | 65 ++++++++++++++++
 .../cordex-arctic_rlds_bias_to_SRB.yaml         | 65 ++++++++++++++++
 .../cordex-arctic_rlus_bias_to_SRB.yaml         | 65 ++++++++++++++++
 .../cordex-arctic_rsds_bias_to_SRB.yaml         | 65 ++++++++++++++++
 .../NARCCAP_examples/Fig10_and_Fig11.yaml       | 81 +++++++++++++++++++
 .../NARCCAP_examples/Fig12_summer.yaml          | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig12_winter.yaml          | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig14_and_Fig15.yaml       | 82 ++++++++++++++++++++
 .../NARCCAP_examples/Fig16_summer.yaml          | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig16_winter.yaml          | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig5_and_Fig6.yaml         | 50 ++++++++++++
 .../NARCCAP_examples/Fig7_summer.yaml           | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig7_winter.yaml           | 75 ++++++++++++++++++
 .../NARCCAP_examples/Fig8_and_Fig9.yaml         | 50 ++++++++++++
 14 files changed, 973 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
new file mode 100644
index 0000000..eb4b4c5
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_clt_MAR-SEP.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 3
+    month_end: 9
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: cld_frac
+        multiplying_factor: 100.0
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/clt*.nc                                                    
+        variable: clt     
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_clt_MAR-SEP_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+
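
As a usage note, each of these configuration files can be read back into Python with PyYAML. The sketch below is a minimal example, assuming PyYAML is installed; note that subplots_array uses the !!python/tuple tag, which yaml.safe_load rejects, so the full loader is needed.

    # Minimal sketch: read the example configuration above with PyYAML.
    # The !!python/tuple tag is only handled by the full (non-safe) loader.
    import yaml

    path = ('RCMES/configuration_files/CORDEX-Arctic_examples/'
            'cordex-arctic_cloud_fraction_bias_to_SRB.yaml')
    with open(path) as f:
        config = yaml.load(f, Loader=yaml.Loader)  # safe_load() raises on !!python/tuple

    print(config['time']['start_time'])                 # datetime.date(1990, 1, 1)
    print(config['datasets']['reference']['variable'])  # 'cld_frac'
    print(config['plots1']['subplots_array'])           # (2, 2)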

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlds_bias_to_SRB.yaml
new file mode 100644
index 0000000..1311843
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlds_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rlds_JUL.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
+        variable: lw_sfc_dn
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rlds*.nc                                                    
+        variable: rlds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rlds_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [1,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlus_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlus_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlus_bias_to_SRB.yaml
new file mode 100644
index 0000000..b03738a
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rlus_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rlus_JUL.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
+        variable: lw_sfc_up
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rlus*.nc                                                    
+        variable: rlus    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rlus_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rsds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rsds_bias_to_SRB.yaml b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rsds_bias_to_SRB.yaml
new file mode 100644
index 0000000..9613e46
--- /dev/null
+++ b/RCMES/configuration_files/CORDEX-Arctic_examples/cordex-arctic_rsds_bias_to_SRB.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+workdir: ./
+output_netcdf_filename: cordex-arctic_rsds_JUL.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: True 
+    start_time: 1990-01-01
+    end_time: 2007-12-31
+    temporal_resolution: monthly
+    month_start: 7
+    month_end: 7
+    average_each_year: False
+
+space:
+    min_lat: 55.00 
+    max_lat: 89.5 
+    min_lon: -179.75
+    max_lon: 178.50
+
+regrid:
+    regrid_on_reference: True
+    regrid_dlat: 0.44
+    regrid_dlon: 0.44
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: sw_sfc_dn
+        multiplying_factor: 1
+
+    targets:
+        data_source: local
+        path: /home/huikyole/data/CORDEX-ARC/rsds*.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: cordex-arctic_rsds_JUL_mean_bias_to_SRB
+    subplots_array: !!python/tuple [2,2] 
+    map_projection: npstere
+
+use_subregions: False
+

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig10_and_Fig11.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig10_and_Fig11.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig10_and_Fig11.yaml
new file mode 100644
index 0000000..0650e61
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig10_and_Fig11.yaml
@@ -0,0 +1,81 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_monthly_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Timeseries_plot_subregion_annual_cycle
+
+plots1:
+    file_name: Fig10
+    subplots_array: !!python/tuple [7,2]
+
+metrics2: Portrait_diagram_subregion_annual_cycle                
+
+plots2:
+    file_name: Fig11
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]
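
The subregions block above lists each region as [south, north, west, east]. Outside of RCMES, such an entry can be turned into a boolean mask on the regular grid defined by the space and regrid sections; the sketch below does this with plain numpy. It is an illustration only, not OCW's own Bounds and subregion handling, and the variable names are assumptions for the example.

    # Sketch: convert subregion entries ([south, north, west, east]) into
    # boolean masks on a regular latitude/longitude grid. Illustrative only.
    import numpy as np

    subregions = {
        'R01': [42.75, 49.75, -123.75, -120.25],
        'R14': [24.25, 30.75, -83.75, -80.25],
    }

    lats = np.arange(23.75, 49.75 + 0.25, 0.5)    # space block, 0.5-degree spacing
    lons = np.arange(-125.75, -66.75 + 0.25, 0.5)
    lon2d, lat2d = np.meshgrid(lons, lats)

    masks = {}
    for name, (south, north, west, east) in subregions.items():
        masks[name] = ((lat2d >= south) & (lat2d <= north) &
                       (lon2d >= west) & (lon2d <= east))

    print(masks['R01'].sum(), 'grid cells fall inside R01')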

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig12_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig12_summer.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig12_summer.yaml
new file mode 100644
index 0000000..f11c136
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig12_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_JJA_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig12_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig12_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig12_winter.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig12_winter.yaml
new file mode 100644
index 0000000..f1f0b1e
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig12_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_prec_DJF_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig12_winter 
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]
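
Fig12_winter.yaml sets month_start: 12 and month_end: 2, so the seasonal window wraps around the year boundary (DJF), unlike the June-August case where month_start is less than month_end. A minimal sketch of a month filter that handles both cases follows; it works on a plain array of month numbers and is only an illustration, not the RCMES subsetting code.

    # Sketch: select the months of a season, handling the December-February wrap-around.
    import numpy as np

    def month_mask(months, month_start, month_end):
        months = np.asarray(months)
        if month_start <= month_end:                             # e.g. JJA: 6..8
            return (months >= month_start) & (months <= month_end)
        return (months >= month_start) | (months <= month_end)   # e.g. DJF: 12, 1, 2

    months = np.arange(1, 13)
    print(months[month_mask(months, 6, 8)])    # [6 7 8]
    print(months[month_mask(months, 12, 2)])   # [ 1  2 12]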

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig14_and_Fig15.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig14_and_Fig15.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig14_and_Fig15.yaml
new file mode 100644
index 0000000..5e01ce0
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig14_and_Fig15.yaml
@@ -0,0 +1,82 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_monthly_1984-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: False
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
+        variable: sw_sfc_dn
+  
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig14
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig15
+
+use_subregions: False
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]
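
All of the NARCCAP examples set regrid_on_reference: False, so the datasets are regridded onto a regular 0.5-degree grid spanning the space block rather than onto the reference grid. The sketch below builds such target latitude/longitude axes with numpy as an illustration; the exact endpoint handling inside RCMES may differ, so treat the array sizes as approximate.

    # Sketch: the regular target grid implied by the NARCCAP space/regrid blocks.
    import numpy as np

    min_lat, max_lat = 23.75, 49.75
    min_lon, max_lon = -125.75, -66.75
    dlat = dlon = 0.50                     # regrid_dlat / regrid_dlon

    new_lats = np.arange(min_lat, max_lat + dlat / 2, dlat)
    new_lons = np.arange(min_lon, max_lon + dlon / 2, dlon)
    print(new_lats.size, 'latitudes x', new_lons.size, 'longitudes')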

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig16_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig16_summer.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig16_summer.yaml
new file mode 100644
index 0000000..db33eff
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig16_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_JJA_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
+        variable: sw_sfc_dn
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig16_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig16_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig16_winter.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig16_winter.yaml
new file mode 100644
index 0000000..e25a4b2
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig16_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_rsds_DJF_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1984-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: local
+        data_name: SRB
+        path: ../data/srb_rel3.0_shortwave_from_1983_to_2007.nc
+        variable: sw_sfc_dn
+
+    targets:
+        data_source: local
+        path: ../data/rsds*ncep.monavg.nc                                                    
+        variable: rsds    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig16_winter
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig5_and_Fig6.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig5_and_Fig6.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig5_and_Fig6.yaml
new file mode 100644
index 0000000..ef7cc9c
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig5_and_Fig6.yaml
@@ -0,0 +1,50 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_tas_annual_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp.*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig5
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig6
+
+use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig7_summer.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig7_summer.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig7_summer.yaml
new file mode 100644
index 0000000..ddbce3b
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig7_summer.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_tas_JJA_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 6
+    month_end: 8
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig7_summer
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig7_winter.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig7_winter.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig7_winter.yaml
new file mode 100644
index 0000000..38add9b
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig7_winter.yaml
@@ -0,0 +1,75 @@
+workdir: ./
+output_netcdf_filename: narccap_tas_DJF_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 12
+    month_end: 2
+    average_each_year: True
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 38
+
+    targets:
+        data_source: local
+        path: ../data/temp*ncep.monavg.nc                                                    
+        variable: temp    
+
+number_of_metrics_and_plots: 1
+
+metrics1: Portrait_diagram_subregion_interannual_variability
+
+plots1:
+    file_name: Fig7_winter 
+
+use_subregions: True 
+
+subregions:
+#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
+    R01: 
+      [42.75, 49.75, -123.75, -120.25]
+    R02:
+      [42.75, 49.75, -119.75, -112.75]
+    R03:
+      [37.25, 42.25, -123.75, -117.75]
+    R04: 
+      [32.25, 37.25, -122.75, -114.75]
+    R05:
+      [31.25, 37.25, -113.75, -108.25]
+    R06:
+      [31.25, 37.25, -108.25, -99.75]
+    R07:
+      [37.25, 43.25, -110.25, -103.75]
+    R08: 
+      [45.25, 49.25, -99.75, -90.25]
+    R09: 
+      [34.75, 45.25, -99.75, -90.25]
+    R10: 
+      [29.75, 34.75, -95.75, -84.75]
+    R11: 
+      [38.25, 44.75, -89.75, -80.25]
+    R12: 
+      [38.25, 44.75, -79.75, -70.25]
+    R13: 
+      [30.75, 38.25, -83.75, -75.25]
+    R14: 
+      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/43cdfd69/RCMES/configuration_files/NARCCAP_examples/Fig8_and_Fig9.yaml
----------------------------------------------------------------------
diff --git a/RCMES/configuration_files/NARCCAP_examples/Fig8_and_Fig9.yaml b/RCMES/configuration_files/NARCCAP_examples/Fig8_and_Fig9.yaml
new file mode 100644
index 0000000..d25ecb6
--- /dev/null
+++ b/RCMES/configuration_files/NARCCAP_examples/Fig8_and_Fig9.yaml
@@ -0,0 +1,50 @@
+workdir: ./                                      
+output_netcdf_filename: narccap_prec_annual_mean_1980-2003.nc
+
+# (RCMES temporally subsets the data to the months between month_start and month_end. If average_each_year is True, the seasonal mean of each year is calculated and used in the metrics calculation; if False, the individual monthly values are used.)
+time:
+    maximum_overlap_period: False
+    start_time: 1980-01-01
+    end_time: 2003-12-31
+    temporal_resolution: monthly
+    month_start: 1
+    month_end: 12
+    average_each_year: True  
+
+space:
+    min_lat: 23.75
+    max_lat: 49.75
+    min_lon: -125.75
+    max_lon: -66.75
+
+regrid:
+    regrid_on_reference: False
+    regrid_dlat: 0.50
+    regrid_dlon: 0.50
+
+datasets:
+    reference:
+        data_source: rcmed
+        data_name: CRU
+        dataset_id: 10
+        parameter_id: 37
+
+    targets:
+        data_source: local
+        path: ../data/prec.*ncep.monavg.nc                                                    
+        variable: prec    
+
+number_of_metrics_and_plots: 2
+
+metrics1: Map_plot_bias_of_multiyear_climatology
+
+plots1:
+    file_name: Fig8
+    subplots_array: !!python/tuple [4,2]
+
+metrics2: Taylor_diagram_spatial_pattern_of_multiyear_climatology
+
+plots2:
+    file_name: Fig9
+
+use_subregions: False