Posted to commits@sdap.apache.org by ea...@apache.org on 2020/08/26 00:33:01 UTC
[incubator-sdap-nexus] branch support-deseason updated: wip
This is an automated email from the ASF dual-hosted git repository.
eamonford pushed a commit to branch support-deseason
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git
The following commit(s) were added to refs/heads/support-deseason by this push:
new c929bc5 wip
c929bc5 is described below
commit c929bc56995d6f83366ab088ce2399447dc50166
Author: Eamon Ford <ea...@gmail.com>
AuthorDate: Tue Aug 25 17:32:49 2020 -0700
wip
---
.../webservice/algorithms_spark/TimeSeriesSpark.py | 46 ++++++++++++++--------
1 file changed, 29 insertions(+), 17 deletions(-)
diff --git a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
index 43f7f6d..d56b46b 100644
--- a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
@@ -117,7 +117,7 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
except:
try:
west, south, east, north = request.get_min_lon(), request.get_min_lat(), \
- request.get_max_lon(), request.get_max_lat()
+ request.get_max_lon(), request.get_max_lat()
bounding_polygon = shapely.geometry.Polygon(
[(west, south), (east, south), (east, north), (west, north), (west, south)])
except:
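For context, the fallback above builds a closed rectangular ring counter-clockwise from the south-west corner. A minimal standalone sketch with hypothetical coordinates:

    import shapely.geometry

    # Hypothetical corner coordinates for illustration.
    west, south, east, north = -120.0, 30.0, -110.0, 40.0

    # The first vertex is repeated to close the ring explicitly
    # (shapely would also close it implicitly).
    bounding_polygon = shapely.geometry.Polygon(
        [(west, south), (east, south), (east, north), (west, north), (west, south)])

    # .bounds returns (minx, miny, maxx, maxy), i.e. (west, south, east, north).
    print(bounding_polygon.bounds)  # (-120.0, 30.0, -110.0, 40.0)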
@@ -160,7 +160,7 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
def calc(self, request, **args):
"""
-
+
:param request: StatsComputeOptions
:param args: dict
:return:
@@ -176,13 +176,13 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
the_time = datetime.now()
daysinrange = self._get_tile_service().find_days_in_range_asc(bounding_polygon.bounds[1],
- bounding_polygon.bounds[3],
- bounding_polygon.bounds[0],
- bounding_polygon.bounds[2],
- shortName,
- start_seconds_from_epoch,
- end_seconds_from_epoch,
- metrics_callback=metrics_record.record_metrics)
+ bounding_polygon.bounds[3],
+ bounding_polygon.bounds[0],
+ bounding_polygon.bounds[2],
+ shortName,
+ start_seconds_from_epoch,
+ end_seconds_from_epoch,
+ metrics_callback=metrics_record.record_metrics)
self.log.info("Finding days in range took %s for dataset %s" % (str(datetime.now() - the_time), shortName))
ndays = len(daysinrange)
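Note the argument order in the call above: shapely's .bounds tuple is (minx, miny, maxx, maxy), so indices 1, 3, 0, 2 map to (min_lat, max_lat, min_lon, max_lon). A minimal sketch of that mapping:

    import shapely.geometry

    # box(minx, miny, maxx, maxy) == (west, south, east, north) for lon/lat.
    poly = shapely.geometry.box(-120.0, 30.0, -110.0, 40.0)
    bounds = poly.bounds

    # Same index shuffle as the find_days_in_range_asc call:
    min_lat, max_lat = bounds[1], bounds[3]
    min_lon, max_lon = bounds[0], bounds[2]
    print(min_lat, max_lat, min_lon, max_lon)  # 30.0 40.0 -120.0 -110.0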
@@ -203,7 +203,19 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
if apply_seasonal_cycle_filter:
the_time = datetime.now()
+ # get time series for _clim dataset
+ daysinrange_clim = self._get_tile_service().find_days_in_range_asc(bounding_polygon.bounds[1],
+ bounding_polygon.bounds[3],
+ bounding_polygon.bounds[0],
+ bounding_polygon.bounds[2],
+ shortName,
+ 0,
+ 31535999,
+ metrics_callback=metrics_record.record_metrics)
+
for result in results:
+ # align _clim time series with original time series
+
month = datetime.utcfromtimestamp(result['time']).month
month_mean, month_max, month_min = self.calculate_monthly_average(month, bounding_polygon.wkt,
shortName)
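The hard-coded window 0 through 31535999 spans exactly calendar year 1970 in Unix time (365 * 86400 = 31536000 seconds), which suggests the _clim climatology dataset is indexed under that synthetic year. A minimal sketch of the month-based alignment the new comment anticipates; the data shapes and the clim_by_month helper are assumptions, not the project's actual API:

    from datetime import datetime

    # Hypothetical example data: climatology timestamps live within 1970,
    # while the real series carries ordinary epoch timestamps.
    daysinrange_clim = [1296000, 3974400]           # mid-Jan and mid-Feb 1970
    results = [{'time': 1579046400, 'mean': 14.2}]  # 2020-01-15

    # Index climatology days by calendar month.
    clim_by_month = {}
    for ts in daysinrange_clim:
        clim_by_month.setdefault(datetime.utcfromtimestamp(ts).month, []).append(ts)

    # Align each result with the climatology days of the same month.
    for result in results:
        month = datetime.utcfromtimestamp(result['time']).month
        print(month, clim_by_month.get(month, []))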
@@ -288,12 +300,12 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
start = (pytz.UTC.localize(beginning_of_month) - EPOCH).total_seconds()
end = (pytz.UTC.localize(end_of_month) - EPOCH).total_seconds()
tile_stats = self._get_tile_service().find_tiles_in_polygon(bounding_polygon, ds, start, end,
- fl=('id,'
- 'tile_avg_val_d,tile_count_i,'
- 'tile_min_val_d,tile_max_val_d,'
- 'tile_min_lat,tile_max_lat,'
- 'tile_min_lon,tile_max_lon'),
- fetch_data=False)
+ fl=('id,'
+ 'tile_avg_val_d,tile_count_i,'
+ 'tile_min_val_d,tile_max_val_d,'
+ 'tile_min_lat,tile_max_lat,'
+ 'tile_min_lon,tile_max_lon'),
+ fetch_data=False)
if len(tile_stats) == 0:
continue
@@ -338,8 +350,8 @@ class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
weights = np.array(monthly_counts) / count_sum
return np.average(monthly_averages, None, weights).item(), \
- np.average(monthly_averages, None, weights).item(), \
- np.average(monthly_averages, None, weights).item()
+ np.average(monthly_averages, None, weights).item(), \
+ np.average(monthly_averages, None, weights).item()
@lru_cache()
def get_min_max_date(self, ds=None):
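For reference on the re-indented return above: np.average called positionally as (values, axis, weights) computes a count-weighted mean, and the same expression fills all three returned slots. A minimal standalone sketch with hypothetical numbers:

    import numpy as np

    # Hypothetical per-month averages and the tile counts behind them.
    monthly_averages = [14.2, 15.1, 13.8]
    monthly_counts = [120, 98, 143]

    count_sum = sum(monthly_counts)
    weights = np.array(monthly_counts) / count_sum

    # Count-weighted mean; axis=None averages over the flattened array.
    weighted_mean = np.average(monthly_averages, None, weights).item()
    print(weighted_mean)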