Posted to commits@climate.apache.org by wh...@apache.org on 2015/04/23 23:17:02 UTC

[1/3] climate git commit: CLIMATE-613 - Add example script of timeseries - Update to include subregion-ing that does not require time bounds - Update documentation

Repository: climate
Updated Branches:
  refs/heads/master 4c95ae091 -> 50f6678c8


CLIMATE-613 - Add example script of timeseries
- Update to include subregion-ing that does not require time bounds
- Update documentation
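
For readers skimming the archive: the subregion-ing change relies on the time limits of an ocw.dataset.Bounds object being optional, so that dsp.subset() can crop a dataset on latitude/longitude alone. A minimal sketch of the two forms, using the example's first region (the time limits shown are illustrative, not part of the commit):

    import datetime
    from ocw.dataset import Bounds

    # Bounds(lat_min, lat_max, lon_min, lon_max[, start, end])
    # Original form of the example: the subregion carries explicit time limits
    start = datetime.datetime(2000, 1, 1)
    end = datetime.datetime(2007, 12, 31)
    r1 = Bounds(-10.0, 0.0, 29.0, 36.5, start, end)

    # Updated form: no time limits, so dsp.subset() restricts lat/lon only
    r1_spatial_only = Bounds(-10.0, 0.0, 29.0, 36.5)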


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/cb4097cd
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/cb4097cd
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/cb4097cd

Branch: refs/heads/master
Commit: cb4097cdf170f6d8fb6607648708807594412e5e
Parents: b258cce
Author: Kim Whitehall <ki...@jpl.nasa.gov>
Authored: Mon Apr 20 11:57:33 2015 -0700
Committer: Kim Whitehall <ki...@jpl.nasa.gov>
Committed: Tue Apr 21 17:18:47 2015 -0700

----------------------------------------------------------------------
 examples/time_series_with_regions.py | 144 ++++++++++++++++++++++++++++++
 1 file changed, 144 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/cb4097cd/examples/time_series_with_regions.py
----------------------------------------------------------------------
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
new file mode 100644
index 0000000..dc8cdc4
--- /dev/null
+++ b/examples/time_series_with_regions.py
@@ -0,0 +1,144 @@
+# Apache OCW lib imports
+from ocw.dataset import Dataset, Bounds
+import ocw.data_source.local as local
+import ocw.data_source.rcmed as rcmed
+import ocw.dataset_processor as dsp
+import ocw.evaluation as evaluation
+import ocw.metrics as metrics
+import ocw.plotter as plotter
+import ocw.utils as utils
+
+import datetime
+import numpy as np
+import numpy.ma as ma
+from os import path
+import urllib
+
+# File URL leader
+FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/"
+# Three Local Model Files 
+FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
+FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc"
+FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc"
+
+LAT_MIN = -45.0 
+LAT_MAX = 42.24 
+LON_MIN = -24.0
+LON_MAX = 60.0 
+START = datetime.datetime(2000, 1, 1)
+END = datetime.datetime(2007, 12, 31)
+# Evaluation bounds used to subset the model datasets before regridding
+EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+
+varName = 'pr'
+gridLonStep = 0.44
+gridLatStep = 0.44
+
+# Variables needed throughout the script
+target_datasets = []
+tSeries = []
+results = []
+labels = []  # could just as easily be the names for each subregion
+region_counter = 0
+
+# Download necessary NetCDF file if not present
+if not path.exists(FILE_1):
+	urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+
+if not path.exists(FILE_2):
+    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
+
+if not path.exists(FILE_3):
+    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
+
+""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
+target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
+target_datasets.append(local.load_file(FILE_2, varName, name="REGCM"))
+target_datasets.append(local.load_file(FILE_3, varName, name="UCT"))
+
+
+""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """
+print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
+# the dataset_id and the parameter id were determined from  
+# https://rcmes.jpl.nasa.gov/content/data-rcmes-database 
+CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
+
+
+""" Step 3: Regrid datasets ... """
+CRU31 = dsp.water_flux_unit_conversion(CRU31)
+CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
+
+for member, each_target_dataset in enumerate(target_datasets):
+	target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
+	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
+	target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30)) 		
+	
+#Regrid
+print("... spatial regrid")
+new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
+new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
+CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
+
+
+for member, each_target_dataset in enumerate(target_datasets):
+	target_datasets[member] = dsp.spatial_regrid(target_datasets[member], new_lats, new_lons)
+
+# Find the monthly climatology for the obs and model datasets
+CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31)
+
+for member, each_target_dataset in enumerate(target_datasets):
+	target_datasets[member].values, target_datasets[member].times = utils.calc_climatology_monthly(target_datasets[member])
+		
+#make the model ensemble
+target_datasets_ensemble = dsp.ensemble(target_datasets)
+target_datasets_ensemble.name="ENS"
+
+#append to the target_datasets for final analysis
+target_datasets.append(target_datasets_ensemble)
+
+""" Step 4: Subregion stuff """
+#update what times are for the subregion
+#get time bounds from existing datasets
+START_SUB = CRU31.times[0]
+END_SUB = CRU31.times[-1]
+
+list_of_regions = [
+ Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB), 
+ Bounds(0.0, 10.0,  29.0, 37.5, START_SUB, END_SUB),
+ Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
+ Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
+ Bounds(-19.3,-10.2,12.0, 20.0, START_SUB, END_SUB),
+ Bounds( 15.0, 30.0, 15.0, 25.0,START_SUB, END_SUB),
+ Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
+ Bounds(-10.9, 10.0, 5.0, 7.3,  START_SUB, END_SUB),
+ Bounds(33.9, 40.0,  6.9, 15.0, START_SUB, END_SUB),
+ Bounds(10.0, 25.0,  0.0, 10.0, START_SUB, END_SUB),
+ Bounds(10.0, 25.0,-10.0,  0.0, START_SUB, END_SUB),
+ Bounds(30.0, 40.0,-15.0,  0.0, START_SUB, END_SUB),
+ Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
+
+region_list=[["R"+str(i+1)] for i in xrange(13)]
+
+for regions in region_list:
+	subset_name = regions[0]+"_CRU31"
+	#labels.append(subset_name) #for legend, uncomment this line
+	subset = dsp.subset(list_of_regions[region_counter], CRU31, subset_name)
+	tSeries = utils.calc_time_series(subset)
+	results.append(tSeries)
+	for member, each_target_dataset in enumerate(target_datasets):
+		subset_name = regions[0]+"_"+target_datasets[member].name
+		#labels.append(subset_name) #for legend, uncomment this line
+		subset = dsp.subset(list_of_regions[region_counter], target_datasets[member], subset_name)
+		tSeries = utils.calc_time_series(subset)
+		results.append(tSeries)
+
+	plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0], ptitle=regions[0], fmt='png')
+	results = []
+	labels = []
+	region_counter += 1
+
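
A condensed view of one pass through the region loop above may help when reading the full diff. This is a sketch, not part of the commit, and it assumes CRU31, target_datasets and list_of_regions already exist exactly as the script builds them:

    import numpy as np
    import ocw.dataset_processor as dsp
    import ocw.utils as utils
    import ocw.plotter as plotter

    region = list_of_regions[0]   # the "R1" box
    curves, labels = [], []
    for name, ds in [("CRU31", CRU31)] + [(d.name, d) for d in target_datasets]:
        sub = dsp.subset(region, ds, "R1_" + name)      # crop to the subregion
        curves.append(utils.calc_time_series(sub))      # spatial mean per time step
        labels.append("R1_" + name)

    # One figure per region, named after the region label (here R1.png)
    plotter.draw_time_series(np.array(curves), CRU31.times, labels, "R1",
                             ptitle="R1", fmt='png')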


[3/3] climate git commit: Resolve CLIMATE-613. Merge PR#190.

Posted by wh...@apache.org.
Resolve CLIMATE-613. Merge PR#190.


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/50f6678c
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/50f6678c
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/50f6678c

Branch: refs/heads/master
Commit: 50f6678c80bdfc09df1301fb561235dff3b2c486
Parents: 4c95ae0 f64fa70
Author: Kim Whitehall <ki...@jpl.nasa.gov>
Authored: Thu Apr 23 14:18:17 2015 -0700
Committer: Kim Whitehall <ki...@jpl.nasa.gov>
Committed: Thu Apr 23 14:18:17 2015 -0700

----------------------------------------------------------------------
 examples/time_series_with_regions.py | 139 ++++++++++++++++++++++++++++++
 1 file changed, 139 insertions(+)
----------------------------------------------------------------------



[2/3] climate git commit: Update example's subregion-ing and docs

Posted by wh...@apache.org.
Update example's subregion-ing and docs
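
The main functional change in the diff below is swapping the 30-day temporal_rebin for normalize_dataset_datetimes, which keeps each dataset's monthly values and only normalizes the timestamps so the observation and model time axes line up. A minimal before/after sketch, assuming ds is any already-loaded OCW Dataset:

    import datetime
    import ocw.dataset_processor as dsp

    # Before (cb4097cd): re-bin the values onto ~30-day averages
    ds = dsp.temporal_rebin(ds, datetime.timedelta(days=30))

    # After (f64fa700): leave the values alone, normalize the datetimes to monthly
    ds = dsp.normalize_dataset_datetimes(ds, 'monthly')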


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/f64fa700
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/f64fa700
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/f64fa700

Branch: refs/heads/master
Commit: f64fa70016e8372930752d500f5b73893d7d89d4
Parents: cb4097c
Author: Kim Whitehall <ki...@jpl.nasa.gov>
Authored: Tue Apr 21 17:17:25 2015 -0700
Committer: Kim Whitehall <ki...@jpl.nasa.gov>
Committed: Tue Apr 21 17:21:08 2015 -0700

----------------------------------------------------------------------
 examples/time_series_with_regions.py | 41 ++++++++++++++-----------------
 1 file changed, 18 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/f64fa700/examples/time_series_with_regions.py
----------------------------------------------------------------------
diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py
index dc8cdc4..b11a807 100644
--- a/examples/time_series_with_regions.py
+++ b/examples/time_series_with_regions.py
@@ -62,17 +62,17 @@ print("Working with the rcmed interface to get CRU3.1 Daily Precipitation")
 CRU31 = rcmed.parameter_dataset(10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END)
 
 
-""" Step 3: Regrid datasets ... """
+""" Step 3: Processing datasets so they are the same shape ... """
+print("Processing datasets so they are the same shape")
 CRU31 = dsp.water_flux_unit_conversion(CRU31)
-CRU31 = dsp.temporal_rebin(CRU31, datetime.timedelta(days=30))
+CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly')
 
 for member, each_target_dataset in enumerate(target_datasets):
 	target_datasets[member] = dsp.subset(EVAL_BOUNDS, target_datasets[member])
 	target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member])
-	target_datasets[member] = dsp.temporal_rebin(target_datasets[member], datetime.timedelta(days=30)) 		
+	target_datasets[member] = dsp.normalize_dataset_datetimes(target_datasets[member], 'monthly')  		
 	
-#Regrid
-print("... spatial regrid")
+print("... spatial regridding")
 new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep)
 new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep)
 CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons)
@@ -95,25 +95,20 @@ target_datasets_ensemble.name="ENS"
 target_datasets.append(target_datasets_ensemble)
 
 """ Step 4: Subregion stuff """
-#update what times are for the subregion
-#get time bounds from existing datasets
-START_SUB = CRU31.times[0]
-END_SUB = CRU31.times[-1]
-
 list_of_regions = [
- Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB), 
- Bounds(0.0, 10.0,  29.0, 37.5, START_SUB, END_SUB),
- Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB),
- Bounds(-19.3,-10.2,12.0, 20.0, START_SUB, END_SUB),
- Bounds( 15.0, 30.0, 15.0, 25.0,START_SUB, END_SUB),
- Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB),
- Bounds(-10.9, 10.0, 5.0, 7.3,  START_SUB, END_SUB),
- Bounds(33.9, 40.0,  6.9, 15.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,  0.0, 10.0, START_SUB, END_SUB),
- Bounds(10.0, 25.0,-10.0,  0.0, START_SUB, END_SUB),
- Bounds(30.0, 40.0,-15.0,  0.0, START_SUB, END_SUB),
- Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)]
+ Bounds(-10.0, 0.0, 29.0, 36.5), 
+ Bounds(0.0, 10.0,  29.0, 37.5), 
+ Bounds(10.0, 20.0, 25.0, 32.5),
+ Bounds(20.0, 33.0, 25.0, 32.5), 
+ Bounds(-19.3,-10.2,12.0, 20.0), 
+ Bounds( 15.0, 30.0, 15.0, 25.0),
+ Bounds(-10.0, 10.0, 7.3, 15.0), 
+ Bounds(-10.9, 10.0, 5.0, 7.3),  
+ Bounds(33.9, 40.0,  6.9, 15.0),
+ Bounds(10.0, 25.0,  0.0, 10.0), 
+ Bounds(10.0, 25.0,-10.0,  0.0), 
+ Bounds(30.0, 40.0,-15.0,  0.0), 
+ Bounds(33.0, 40.0, 25.0, 35.0)]
 
 region_list=[["R"+str(i+1)] for i in xrange(13)]