You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@climate.apache.org by wh...@apache.org on 2015/04/23 22:48:58 UTC
[2/3] climate git commit: - Wrote DSP subset function more succinctly
- Update DS print statement in _validate_inputs - Update Bounds __init__
logic
- Wrote DSP subset function more succinctly
- Update DS print statement in _validate_inputs
- Update Bounds __init__ logic
Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/446e827b
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/446e827b
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/446e827b
Branch: refs/heads/master
Commit: 446e827b823e66e3d15d224a7ad60479e2d4a13d
Parents: 126631d
Author: Kim Whitehall <ki...@jpl.nasa.gov>
Authored: Thu Apr 23 13:10:44 2015 -0700
Committer: Kim Whitehall <ki...@jpl.nasa.gov>
Committed: Thu Apr 23 13:10:44 2015 -0700
----------------------------------------------------------------------
ocw/dataset.py | 16 ++++-----
ocw/dataset_processor.py | 82 +++++++++++++++++++++----------------------
2 files changed, 49 insertions(+), 49 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/climate/blob/446e827b/ocw/dataset.py
----------------------------------------------------------------------
diff --git a/ocw/dataset.py b/ocw/dataset.py
index b5358f9..d231f0e 100644
--- a/ocw/dataset.py
+++ b/ocw/dataset.py
@@ -185,7 +185,7 @@ class Dataset:
# Finally check that the Values array conforms to the proper shape
if value_dim == 2 and values.shape != (lat_count, lon_count):
err_msg = """Value Array must be of shape (times, lats, lons).
- Expected shape (%s, %s, %s) but received (%s, %s, %s)""" % (lat_count,
+ Expected shape (%s, %s) but received (%s, %s)""" % (lat_count,
lon_count,
values.shape[0],
values.shape[1])
@@ -270,16 +270,16 @@ class Bounds(object):
self._lon_min = float(lon_min)
self._lon_max = float(lon_max)
- if not start:
- self._start = None
- else:
+ if start:
self._start = start
-
- if not end:
- self._end = None
else:
- self._end = end
+ self._start = None
+ if end:
+ self._end = end
+ else:
+ self._end = None
+
@property
def lat_min(self):
return self._lat_min
http://git-wip-us.apache.org/repos/asf/climate/blob/446e827b/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 76da817..f600429 100644
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -183,48 +183,48 @@ def subset(subregion, target_dataset, subregion_name=None):
if not subregion_name:
subregion_name = target_dataset.name
- # Build new dataset with subset information
+ # Slice the values array with our calculated slice indices
if target_dataset.values.ndim == 2:
- return ds.Dataset(
- # Slice the lats array with our calculated slice indices
- target_dataset.lats[dataset_slices["lat_start"]:
- dataset_slices["lat_end"] + 1],
- # Slice the lons array with our calculated slice indices
- target_dataset.lons[dataset_slices["lon_start"]:
- dataset_slices["lon_end"] + 1],
- # Slice the times array with our calculated slice indices
- target_dataset.times[dataset_slices["time_start"]:
- dataset_slices["time_end"]+ 1],
- # Slice the values array with our calculated slice indices
- target_dataset.values[
- dataset_slices["lat_start"]:dataset_slices["lat_end"] + 1,
- dataset_slices["lon_start"]:dataset_slices["lon_end"] + 1],
- variable=target_dataset.variable,
- units=target_dataset.units,
- name=subregion_name,
- origin=target_dataset.origin
- )
- if target_dataset.values.ndim == 3:
- return ds.Dataset(
- # Slice the lats array with our calculated slice indices
- target_dataset.lats[dataset_slices["lat_start"]:
- dataset_slices["lat_end"] + 1],
- # Slice the lons array with our calculated slice indices
- target_dataset.lons[dataset_slices["lon_start"]:
- dataset_slices["lon_end"] + 1],
- # Slice the times array with our calculated slice indices
- target_dataset.times[dataset_slices["time_start"]:
- dataset_slices["time_end"]+ 1],
- # Slice the values array with our calculated slice indices
- target_dataset.values[
- dataset_slices["time_start"]:dataset_slices["time_end"] + 1,
- dataset_slices["lat_start"]:dataset_slices["lat_end"] + 1,
- dataset_slices["lon_start"]:dataset_slices["lon_end"] + 1],
- variable=target_dataset.variable,
- units=target_dataset.units,
- name=subregion_name,
- origin=target_dataset.origin
- )
+ subset_values = ma.zeros([len(target_dataset.values[
+ dataset_slices["lat_start"]:dataset_slices["lat_end"]]),
+ len(target_dataset.values[
+ dataset_slices["lon_start"]:dataset_slices["lon_end"]])])
+
+ subset_values = target_dataset.values[
+ dataset_slices["lat_start"]:dataset_slices["lat_end"] + 1,
+ dataset_slices["lon_start"]:dataset_slices["lon_end"] + 1]
+
+ elif target_dataset.values.ndim == 3:
+ subset_values = ma.zeros([len(target_dataset.values[
+ dataset_slices["time_start"]:dataset_slices["time_end"]]),
+ len(target_dataset.values[
+ dataset_slices["lat_start"]:dataset_slices["lat_end"]]),
+ len(target_dataset.values[
+ dataset_slices["lon_start"]:dataset_slices["lon_end"]])])
+
+ subset_values = target_dataset.values[
+ dataset_slices["time_start"]:dataset_slices["time_end"] + 1,
+ dataset_slices["lat_start"]:dataset_slices["lat_end"] + 1,
+ dataset_slices["lon_start"]:dataset_slices["lon_end"] + 1]
+
+ # Build new dataset with subset information
+ return ds.Dataset(
+ # Slice the lats array with our calculated slice indices
+ target_dataset.lats[dataset_slices["lat_start"]:
+ dataset_slices["lat_end"] + 1],
+ # Slice the lons array with our calculated slice indices
+ target_dataset.lons[dataset_slices["lon_start"]:
+ dataset_slices["lon_end"] + 1],
+ # Slice the times array with our calculated slice indices
+ target_dataset.times[dataset_slices["time_start"]:
+ dataset_slices["time_end"]+ 1],
+ # Slice the values array with our calculated slice indices
+ subset_values,
+ variable=target_dataset.variable,
+ units=target_dataset.units,
+ name=subregion_name,
+ origin=target_dataset.origin
+ )
def safe_subset(subregion, target_dataset, subregion_name=None):