Posted to commits@climate.apache.org by hu...@apache.org on 2017/03/27 18:18:10 UTC

[2/3] climate git commit: CLIMATE-903

CLIMATE-903

- Landscape code quality has been improved
- test_dataset_processor has been updated to match the revised temporal_slice function
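
The test changes below switch temporal_slice from index arguments to datetime
bounds. As a quick illustration, here is a minimal, self-contained sketch of
calling the revised function on a small synthetic dataset. The variable names
and values are made up for the example; only the Dataset constructor and the
temporal_slice signature come from the code in this commit:

    import datetime
    import numpy as np
    import ocw.dataset_processor as dp
    from ocw.dataset import Dataset

    # Synthetic one-year monthly dataset on a 2x2 grid.
    lats = np.array([40., 45.])
    lons = np.array([-105., -100.])
    times = np.array([datetime.datetime(2000, m, 1) for m in range(1, 13)])
    values = np.zeros((12, 2, 2))
    ds = Dataset(lats, lons, times, values, variable='tas', units='K')

    # Slice with datetime bounds rather than indices (inclusive at both
    # ends, as the updated tests assume).
    sliced = dp.temporal_slice(ds,
                               start_time=datetime.datetime(2000, 2, 1),
                               end_time=datetime.datetime(2000, 5, 1))
    print(sliced.times)  # February through May 2000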


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/46f901e0
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/46f901e0
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/46f901e0

Branch: refs/heads/master
Commit: 46f901e03d187e3a995abc9e9950f9bbaab235e3
Parents: 77674bd
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Fri Mar 24 15:44:59 2017 -0700
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Fri Mar 24 15:44:59 2017 -0700

----------------------------------------------------------------------
 ocw/dataset_processor.py            | 22 +++++++---------------
 ocw/plotter.py                      |  3 +--
 ocw/tests/test_dataset_processor.py |  8 ++++----
 3 files changed, 12 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/46f901e0/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index f8e740a..10f41e3 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -403,7 +403,7 @@ def subset(target_dataset, subregion, subregion_name=None, extract=True, user_ma
         if target_dataset.lats.ndim == 2 and target_dataset.lons.ndim == 2:
             temporal_subset = temporal_slice(
                 target_dataset, subregion.start, subregion.end)
-            nt, ny, nx = temporal_subset.values.shape
+            nt = temporal_subset.values.shape[0]
             y_index, x_index = np.where(
                 (target_dataset.lats >= subregion.lat_max) | (
                     target_dataset.lats <= subregion.lat_min) |
@@ -417,12 +417,11 @@ def subset(target_dataset, subregion, subregion_name=None, extract=True, user_ma
                 target_dataset.lats,
                 target_dataset.lons,
                 temporal_subset.times,
-                new_values,            
+                new_values,
                 variable=target_dataset.variable,
                 units=target_dataset.units,
                 name=subregion_name,
-                origin=target_dataset.origin
-                )
+                origin=target_dataset.origin)
 
         elif target_dataset.lats.ndim == 1 and target_dataset.lons.ndim == 1:
             # Get subregion indices into subregion data
@@ -485,8 +484,7 @@ def subset(target_dataset, subregion, subregion_name=None, extract=True, user_ma
             variable=target_dataset.variable,
             units=target_dataset.units,
             name=subregion_name,
-            origin=target_dataset.origin
-            )
+            origin=target_dataset.origin)
 
     if subregion.boundary_type == 'user':
         temporal_subset = temporal_slice(
@@ -504,9 +502,7 @@ def subset(target_dataset, subregion, subregion_name=None, extract=True, user_ma
             variable=target_dataset.variable,
             units=target_dataset.units,
             name=subregion_name,
-            origin=target_dataset.origin
-            )
-
+            origin=target_dataset.origin)
 
 def temporal_slice(target_dataset, start_time, end_time):
     '''Temporally slice given dataset(s) with subregion information. This does not
@@ -537,17 +533,13 @@ def temporal_slice(target_dataset, start_time, end_time):
         target_dataset.lats,
         target_dataset.lons,
         new_times,
-        new_values,  
+        new_values, 
         variable=target_dataset.variable,
         units=target_dataset.units,
-        origin=target_dataset.origin
-        )
-         
-
+        origin=target_dataset.origin)
 
 def safe_subset(target_dataset, subregion, subregion_name=None):
     '''Safely subset given dataset with subregion information
-
     A standard subset requires that the provided subregion be entirely
     contained within the datasets bounds. `safe_subset` returns the
     overlap of the subregion and dataset without returning an error.
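
In the first hunk of the dataset_processor.py diff above, the 2-D
(curvilinear) branch of subset() masks grid points outside the requested box
rather than cropping them. Below is a standalone numpy sketch of that masking
idea, using made-up arrays and bounds; none of these variable names or values
come from the commit itself:

    import numpy as np
    import numpy.ma as ma

    # Hypothetical 2-D curvilinear lat/lon grid and a (time, y, x) data cube.
    lats = np.linspace(30., 50., 12).reshape(3, 4)
    lons = np.linspace(-120., -100., 12).reshape(3, 4)
    values = np.arange(2 * 3 * 4, dtype=float).reshape(2, 3, 4)

    lat_min, lat_max = 35., 45.
    lon_min, lon_max = -115., -105.

    # Indices of grid points that fall outside the requested box.
    y_index, x_index = np.where((lats >= lat_max) | (lats <= lat_min) |
                                (lons >= lon_max) | (lons <= lon_min))

    # Mask those points at every time step instead of slicing them away.
    new_values = ma.array(values)
    new_values[:, y_index, x_index] = ma.masked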

http://git-wip-us.apache.org/repos/asf/climate/blob/46f901e0/ocw/plotter.py
----------------------------------------------------------------------
diff --git a/ocw/plotter.py b/ocw/plotter.py
index 8e56b1d..c0a6858 100755
--- a/ocw/plotter.py
+++ b/ocw/plotter.py
@@ -1153,8 +1153,7 @@ def fill_US_states_with_color(regions, fname, fmt='png', ptitle='',
     m.drawcountries(linewidth=0.)
 
     # Add the title
-    ax.set_title(ptitle) 
-
+    ax.set_title(ptitle)
     # Save the figure
     fig.savefig('%s.%s' % (fname, fmt), bbox_inches='tight', dpi=fig.dpi)
     fig.clf()

http://git-wip-us.apache.org/repos/asf/climate/blob/46f901e0/ocw/tests/test_dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_dataset_processor.py b/ocw/tests/test_dataset_processor.py
index 9beee75..32fa42d 100644
--- a/ocw/tests/test_dataset_processor.py
+++ b/ocw/tests/test_dataset_processor.py
@@ -211,8 +211,8 @@ class TestTemporalSlice(unittest.TestCase):
         dates = np.array([datetime.datetime(2000, month, 1)
                           for month in range(start_index + 1, end_index + 2)])
         new_dataset = dp.temporal_slice(self.ten_year_dataset,
-                                        start_index,
-                                        end_index)
+                                        start_time = self.ten_year_dataset.times[start_index],
+                                        end_time = self.ten_year_dataset.times[end_index])
         np.testing.assert_array_equal(new_dataset.times, dates)
 
     def test_returned_dataset_values(self):
@@ -222,8 +222,8 @@ class TestTemporalSlice(unittest.TestCase):
         end_index = 4
         values = self.ten_year_dataset.values[start_index:end_index + 1]
         new_dataset = dp.temporal_slice(self.ten_year_dataset,
-                                        start_index,
-                                        end_index)
+                                        start_time = self.ten_year_dataset.times[start_index],
+                                        end_time = self.ten_year_dataset.times[end_index])
         np.testing.assert_array_equal(new_dataset.values, values)