You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@climate.apache.org by hu...@apache.org on 2017/05/11 14:33:41 UTC

[1/3] climate git commit: CLIMATE-914 - Update dataset_processor.spatial_regrid module

Repository: climate
Updated Branches:
  refs/heads/master 523fc7d32 -> 1ad9e17c6


CLIMATE-914 - Update dataset_processor.spatial_regrid module

- map_coordinates has been replaced with scipy.interpolate.griddata.


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/6cd5c1d4
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/6cd5c1d4
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/6cd5c1d4

Branch: refs/heads/master
Commit: 6cd5c1d4ef2177750d259943f369a5e2928040fc
Parents: 523fc7d
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Wed May 10 16:23:05 2017 -0700
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Wed May 10 16:23:05 2017 -0700

----------------------------------------------------------------------
 ocw/dataset_processor.py | 90 +++++++++++++++----------------------------
 1 file changed, 30 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/6cd5c1d4/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 917419e..2097cc4 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -21,7 +21,7 @@ import ocw.utils as utils
 import datetime
 import numpy as np
 import numpy.ma as ma
-import scipy.interpolate
+from scipy.interpolate import griddata
 import scipy.ndimage
 from scipy.stats import rankdata
 from scipy.ndimage import map_coordinates
@@ -222,10 +222,7 @@ def spatial_regrid(target_dataset, new_latitudes, new_longitudes,
 
     # Make masked array of shape (times, new_latitudes,new_longitudes)
     new_values = ma.zeros([len(target_dataset.times),
-                           ny_new, nx_new])
-    # Make masked array of shape (times, new_latitudes,new_longitudes)
-    new_values = ma.zeros([len(target_dataset.times),
-                           ny_new, nx_new])
+                           ny_new, nx_new])+1.e+20
 
     # Boundary vertices of target_dataset
     vertices = []
@@ -249,81 +246,54 @@ def spatial_regrid(target_dataset, new_latitudes, new_longitudes,
         for ix in np.arange(nx_old)[::-1]:
             vertices.append([lons[0, ix], lats[0, ix]])
     path = Path(vertices)
-
-    # Convert new_lats and new_lons to float indices
-    new_lons_indices = np.zeros(new_lons.shape)
-    new_lats_indices = np.zeros(new_lats.shape)
+    new_xy_mask = np.ones(new_lats.shape)
 
     for iy in np.arange(ny_new):
         for ix in np.arange(nx_new):
             if path.contains_point([new_lons[iy, ix],
                                     new_lats[iy, ix]]) or not boundary_check:
-                if regular_grid:
-                    mn = lats.min()
-                    mx = lats.max()
-                    new_lats_indices[iy, ix] = (
-                        ny_old - 1.) * (new_lats[iy, ix] - mn) / (mx - mn)
-                    mn = lons.min()
-                    mx = lons.max()
-                    new_lons_indices[iy, ix] = (
-                        nx_old - 1.) * (new_lons[iy, ix] - mn) / (mx - mn)
-                else:
-                    distance_from_original_grids = (
-                        (lons - new_lons[iy, ix])**2. +
-                        (lats - new_lats[iy, ix])**2.)**0.5
-                    if np.min(distance_from_original_grids) == 0.:
-                        new_lats_indices[iy, ix], new_lons_indices[
-                            iy, ix] = np.where(
-                                distance_from_original_grids == 0)
-                    else:
-                        distance_rank = rankdata(
-                            distance_from_original_grids.flatten(),
-                            method='ordinal').reshape(lats.shape)
-                        # the nearest grid point's indices
-                        iy1, ix1 = np.where(distance_rank == 1)
-                        # point [iy2, ix] is diagonally across from [iy1, ix1]
-                        iy2, ix2 = np.where(distance_rank == 4)
-                        dist1 = distance_from_original_grids[iy1, ix1]
-                        dist2 = distance_from_original_grids[iy2, ix2]
-                        new_lats_indices[iy, ix] = (
-                            dist1 * iy2 + dist2 * iy1) / (dist1 + dist2)
-                        new_lons_indices[iy, ix] = (
-                            dist1 * ix2 + dist2 * ix1) / (dist1 + dist2)
-            else:
-                new_lats_indices[iy, ix] = -999.
-                new_lats_indices[iy, ix] = -999.
-    new_lats_indices = ma.masked_less(new_lats_indices, 0.)
-    new_lons_indices = ma.masked_less(new_lons_indices, 0.)
-
+               new_xy_mask[iy, ix] = 0.
+            
+    new_index = np.where(new_xy_mask == 0.)
     # Regrid the data on each time slice
     for i in range(len(target_dataset.times)):
         if len(target_dataset.times) == 1 and target_dataset.values.ndim == 2:
             values_original = ma.array(target_dataset.values)
         else:
             values_original = ma.array(target_dataset.values[i])
+        new_mask = np.copy(values_original.mask)
         for shift in (-1, 1):
             for axis in (0, 1):
                 q_shifted = np.roll(values_original, shift=shift, axis=axis)
-                idx = ~q_shifted.mask * values_original.mask
-                indices = np.where(idx)[0]
-                values_original.data[indices] = q_shifted[indices]
-        new_values[i] = map_coordinates(values_original,
-                                        [new_lats_indices.flatten(),
-                                         new_lons_indices.flatten()],
-                                        order=1).reshape(new_lats.shape)
-        new_values[i] = ma.array(new_values[i], mask=new_lats_indices.mask)
+                if (np.where((values_original.mask == True) & (q_shifted.mask == False)))[0].size !=0:
+                    index1 =np.where((values_original.mask == True) & (q_shifted.mask == False))
+                    n_indices = len(index1[0])
+                    values_original.data[index1] = q_shifted[index1]
+                    new_mask[index1] = np.repeat(False, n_indices)
+                    mask_index = np.where(~new_mask)
+        if new_mask.size != 1:
+            mask_index = np.where(~new_mask)
+        else:
+            mask_index = np.where(~np.isnan(values_original))
+        new_values_temp = griddata((lons[mask_index], lats[mask_index]), values_original[mask_index],
+                              (new_lons[new_index],
+                               new_lats[new_index]),
+                               method='linear')
         # Make a masking map using nearest neighbour interpolation -use this to
         # determine locations with MDI and mask these
         qmdi = np.zeros_like(values_original)
-        values_true_indices = np.where(values_original.mask == True)[0]
-        values_false_indices = np.where(values_original.mask == False)[0]
+        values_true_indices = np.where(values_original.mask == True)
+        values_false_indices = np.where(values_original.mask == False)
         qmdi[values_true_indices] = 1.
         qmdi[values_false_indices] = 0.
-        qmdi_r = map_coordinates(qmdi, [new_lats_indices.flatten(
-        ), new_lons_indices.flatten()], order=1).reshape(new_lats.shape)
-        mdimask = (qmdi_r != 0.0)
+        qmdi_r = griddata((lons.flatten(), lats.flatten()), qmdi.flatten(), 
+                             (new_lons[new_index],
+                              new_lats[new_index]),
+                              method='nearest')
+        new_values_temp = ma.masked_where(qmdi_r != 0.0, new_values_temp)
         # Combine missing data mask, with outside domain mask define above.
-        new_values[i].mask = np.logical_or(mdimask, new_values[i].mask)
+        new_values[i, new_index[0], new_index[1]] = new_values_temp[:]
+        new_values[i,:] = ma.masked_equal(new_values[i,:], 1.e+20)
 
     # TODO:
     # This will call down to the _congrid() function and the lat and lon


[3/3] climate git commit: Merge branch 'CLIMATE-914'

Posted by hu...@apache.org.
Merge branch 'CLIMATE-914'


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/1ad9e17c
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/1ad9e17c
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/1ad9e17c

Branch: refs/heads/master
Commit: 1ad9e17c6d0d7a965af757510a210ae47a570d3e
Parents: 523fc7d ee5f3c2
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Thu May 11 07:33:28 2017 -0700
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Thu May 11 07:33:28 2017 -0700

----------------------------------------------------------------------
 ocw/dataset_processor.py            | 90 +++++++++++---------------------
 ocw/tests/test_dataset_processor.py |  6 +--
 2 files changed, 32 insertions(+), 64 deletions(-)
----------------------------------------------------------------------



[2/3] climate git commit: the incorrect reshaping of the latitude and longitude arrays was replaced with numpy.meshgrid

Posted by hu...@apache.org.
The incorrect reshaping of the latitude and longitude arrays was replaced with numpy.meshgrid.


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/ee5f3c26
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/ee5f3c26
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/ee5f3c26

Branch: refs/heads/master
Commit: ee5f3c26fb1d4009325028bda4b29536b14a0319
Parents: 6cd5c1d
Author: huikyole <hu...@argo.jpl.nasa.gov>
Authored: Wed May 10 21:45:06 2017 -0700
Committer: huikyole <hu...@argo.jpl.nasa.gov>
Committed: Wed May 10 21:45:06 2017 -0700

----------------------------------------------------------------------
 ocw/tests/test_dataset_processor.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/ee5f3c26/ocw/tests/test_dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/tests/test_dataset_processor.py b/ocw/tests/test_dataset_processor.py
index 32fa42d..27af951 100644
--- a/ocw/tests/test_dataset_processor.py
+++ b/ocw/tests/test_dataset_processor.py
@@ -391,10 +391,8 @@ class TestSpatialRegrid(unittest.TestCase):
                           self.regridded_dataset.variable)
 
     def test_two_dimensional_lats_lons(self):
-        self.input_dataset.lats = np.array(range(-89, 90, 2))
-        self.input_dataset.lons = np.array(range(-179, 180, 4))
-        self.input_dataset.lats = self.input_dataset.lats.reshape(2, 45)
-        self.input_dataset.lons = self.input_dataset.lons.reshape(2, 45)
+        self.input_dataset.lons, self.input_dataset.lats = np.meshgrid(
+                                  np.array(range(-179, 180, 2)), np.array(range(-89, 90, 2)))
         new_dataset = dp.spatial_regrid(
             self.input_dataset, self.new_lats, self.new_lons)
         np.testing.assert_array_equal(new_dataset.lats, self.new_lats)