Posted to commits@climate.apache.org by le...@apache.org on 2016/02/27 11:36:00 UTC

[2/2] climate git commit: CLIMATE-768 - Fix failing tests in test_dap.py. Add extra logic to dap.py to check for named dimensions.

CLIMATE-768 - Fix failing tests in test_dap.py. Add extra logic to dap.py to check for named dimensions.
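
The essence of the change: instead of assuming the dimension order is (time, lat, lon), dap.py now looks the dimensions up by name (case-insensitively) and only falls back to the positional defaults when a name is not found. A minimal sketch of that lookup, written for Python 3 and using a hypothetical helper name (the committed code targets Python 2, where map() returns a list):

    def _named_dimension_lookup(dimensions):
        # Lower-cased copy is used only for matching; the original names are returned.
        lowered = [d.lower() for d in dimensions]
        time = dimensions[lowered.index('time') if 'time' in lowered else 0]
        lat = dimensions[lowered.index('lat') if 'lat' in lowered else 1]
        lon = dimensions[lowered.index('lon') if 'lon' in lowered else 2]
        return time, lat, lon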


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/4e36c1db
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/4e36c1db
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/4e36c1db

Branch: refs/heads/master
Commit: 4e36c1db769959b870f4e49355731e19388c92d3
Parents: de190c6
Author: MichaelArthurAnderson <mi...@gmail.com>
Authored: Fri Feb 26 13:28:46 2016 -0500
Committer: MichaelArthurAnderson <mi...@gmail.com>
Committed: Fri Feb 26 13:28:46 2016 -0500

----------------------------------------------------------------------
 ocw/data_source/dap.py | 20 +++++++++++++-------
 1 file changed, 13 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
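
For context, loading through this data source looks roughly like the following; the URL and variable name are placeholders, not values taken from this commit:

    import ocw.data_source.dap as dap

    # Hypothetical OpenDAP endpoint and variable name; substitute a real dataset.
    ds = dap.load('http://example.com/dodsC/sample.nc', 'tas', name='sample')
    print(ds.lats.shape, ds.lons.shape, ds.times.shape)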


http://git-wip-us.apache.org/repos/asf/climate/blob/4e36c1db/ocw/data_source/dap.py
----------------------------------------------------------------------
diff --git a/ocw/data_source/dap.py b/ocw/data_source/dap.py
index 7b6e216..fc9d2a3 100644
--- a/ocw/data_source/dap.py
+++ b/ocw/data_source/dap.py
@@ -17,10 +17,10 @@
 
 from pydap.client import open_url
 from netcdftime import utime
-import requests
 import numpy as np
 from ocw.dataset import Dataset
 
+
 def load(url, variable, name=''):
     '''Load a Dataset from an OpenDAP URL
 
@@ -42,16 +42,21 @@ def load(url, variable, name=''):
     d = open_url(url)
     dataset = d[variable]
 
-    # Grab the lat, lon, and time variable names.
-    # We assume the variable order is (time, lat, lon)
+    # By convention, though not by standard, the dimensions that exist appear in the order:
+    # time (t), altitude (z), latitude (y), longitude (x).
+    # Conventions are not always followed, and not every dimension is always present, so
+    # look the dimensions up by name first and fall back to pulling the first three
+    # columns only when a named dimension is not found.
+    temp_dimensions = map(lambda x:x.lower(),dataset.dimensions)
+
     dataset_dimensions = dataset.dimensions
-    time = dataset_dimensions[0]
-    lat = dataset_dimensions[1]
-    lon = dataset_dimensions[2]
+    time = dataset_dimensions[temp_dimensions.index('time') if 'time' in temp_dimensions else 0]
+    lat = dataset_dimensions[temp_dimensions.index('lat') if 'lat' in temp_dimensions else 1]
+    lon = dataset_dimensions[temp_dimensions.index('lon') if 'lon' in temp_dimensions else 2]
 
     # Time is given to us in some units since an epoch. We need to convert
     # these values to datetime objects. Note that we use the main object's
-    # time object and not the dataset specific reference to it. We need to 
+    # time object and not the dataset specific reference to it. We need to
     # grab the 'units' from it and it fails on the dataset specific object.
     times = np.array(_convert_times_to_datetime(d[time]))
 
@@ -67,6 +72,7 @@ def load(url, variable, name=''):
     return Dataset(lats, lons, times, values, variable,
                    name=name, origin=origin)
 
+
 def _convert_times_to_datetime(time):
     '''Convert the OpenDAP time object's values to datetime objects