Posted to commits@sdap.apache.org by km...@apache.org on 2023/06/22 15:12:16 UTC

[incubator-sdap-nexus] branch tmp-aq-match-updates updated: Bug fix for numpy array issue

This is an automated email from the ASF dual-hosted git repository.

kmarlis pushed a commit to branch tmp-aq-match-updates
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git


The following commit(s) were added to refs/heads/tmp-aq-match-updates by this push:
     new b7a65fe  Bug fix for numpy array issue
b7a65fe is described below

commit b7a65fe56e1cc52508b264cc5105d92a5a8fee8a
Author: kevinmarlis <ke...@gmail.com>
AuthorDate: Thu Jun 22 08:11:48 2023 -0700

    Bug fix for numpy array issue
---
 analysis/webservice/algorithms_spark/MaximaMinimaSpark.py | 4 ++--
 analysis/webservice/algorithms_spark/VarianceSpark.py     | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py b/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
index 7521d37..6f00344 100644
--- a/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
+++ b/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
@@ -180,9 +180,9 @@ class MaximaMinimaSparkHandlerImpl(NexusCalcSparkHandler):
                                                               self._maxLonCent))
 
         # Create array of tuples to pass to Spark map function
-        nexus_tiles_spark = [[self._find_tile_bounds(t),
+        nexus_tiles_spark = np.array([[self._find_tile_bounds(t),
                               self._startTime, self._endTime,
-                              self._ds] for t in nexus_tiles]
+                              self._ds] for t in nexus_tiles], dtype='object')
 
         # Remove empty tiles (should have bounds set to None)
         bad_tile_inds = np.where([t[0] is None for t in nexus_tiles_spark])[0]
diff --git a/analysis/webservice/algorithms_spark/VarianceSpark.py b/analysis/webservice/algorithms_spark/VarianceSpark.py
index 07922e6..6079f5c 100644
--- a/analysis/webservice/algorithms_spark/VarianceSpark.py
+++ b/analysis/webservice/algorithms_spark/VarianceSpark.py
@@ -180,9 +180,9 @@ class VarianceNexusSparkHandlerImpl(NexusCalcSparkHandler):
                                                               self._maxLonCent))
 
         # Create array of tuples to pass to Spark map function
-        nexus_tiles_spark = [[self._find_tile_bounds(t),
+        nexus_tiles_spark = np.array([[self._find_tile_bounds(t),
                               self._startTime, self._endTime,
-                              self._ds] for t in nexus_tiles]
+                              self._ds] for t in nexus_tiles], dtype='object')
 
         # Remove empty tiles (should have bounds set to None)
         bad_tile_inds = np.where([t[0] is None for t in nexus_tiles_spark])[0]
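Editor's note on the fix: the change works around NumPy's stricter handling of ragged nested sequences. Since NumPy 1.20 implicitly building an array from rows whose elements have mismatched shapes has been deprecated, and NumPy 1.24 turned it into a ValueError unless dtype=object is given explicitly. The tile rows here mix a bounds tuple (or None for empty tiles), two timestamps, and a dataset name, so the explicit object dtype is what keeps the later np.where/np.delete filtering working. Below is a minimal, self-contained sketch of the behavior; the row values are made-up stand-ins, not real tile data from the handlers above.

    import numpy as np

    # Hypothetical stand-ins for the tile metadata rows built in
    # MaximaMinimaSpark/VarianceSpark: (bounds, start time, end time, dataset).
    # The first column mixes a 4-tuple with None, so the rows are "ragged"
    # from NumPy's point of view.
    rows = [[(-90.0, 90.0, -180.0, 180.0), 1559347200, 1561939200, 'dataset_a'],
            [None,                         1559347200, 1561939200, 'dataset_a']]

    # On NumPy >= 1.24 this raises ValueError ("inhomogeneous shape");
    # on 1.20-1.23 it emits a VisibleDeprecationWarning.
    # arr = np.array(rows)

    # Declaring an object dtype keeps each cell as an opaque Python object,
    # so the mixed-shape rows are accepted as a (2, 4) object array.
    arr = np.array(rows, dtype='object')

    # Downstream filtering of empty tiles (bounds of None) still works.
    bad_tile_inds = np.where([row[0] is None for row in arr])[0]
    arr = np.delete(arr, bad_tile_inds, axis=0)
    print(arr.shape)  # (1, 4)

Keeping the container as a NumPy object array (rather than a plain Python list) preserves the existing np.delete-based removal of empty tiles without further changes to the handlers.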