Posted to commits@sdap.apache.org by rk...@apache.org on 2024/02/29 15:30:04 UTC

(incubator-sdap-nexus) 01/07: Revert "more module renaming"

This is an automated email from the ASF dual-hosted git repository.

rkk pushed a commit to branch SDAP-511-b
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git

commit 9dcfac237145f1cf02aa021eb3d2e233d44fc9d9
Author: rileykk <ri...@jpl.nasa.gov>
AuthorDate: Thu Feb 29 07:28:03 2024 -0800

    Revert "more module renaming"
    
    This reverts commit ea7d72429e853deb83b15982369c481a09ccc60f.
---
 analysis/webservice/algorithms/doms/insitusubset.py        |  2 +-
 analysis/webservice/algorithms_spark/Matchup.py            |  2 +-
 analysis/webservice/algorithms_spark/MatchupDoms.py        |  2 +-
 analysis/webservice/config/web.ini                         |  2 +-
 .../nexus_tornado/app_builders/HandlerArgsBuilder.py       | 14 +++++++-------
 .../nexus_tornado/app_builders/NexusAppBuilder.py          |  2 +-
 .../nexus_tornado/app_builders/SparkContextBuilder.py      |  2 +-
 tools/doms-data-tools/update_values_type.py                |  2 +-
 tools/domspurge/purge.py                                   |  4 ++--
 9 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/analysis/webservice/algorithms/doms/insitusubset.py b/analysis/webservice/algorithms/doms/insitusubset.py
index 4fa0627..f2dcc30 100644
--- a/analysis/webservice/algorithms/doms/insitusubset.py
+++ b/analysis/webservice/algorithms/doms/insitusubset.py
@@ -211,7 +211,7 @@ class InSituSubsetResult(object):
 
 def query_edge(dataset, variable, startTime, endTime, bbox, platform, depth_min, depth_max, session, itemsPerPage=1000,
                startIndex=0, stats=True):
-    log = logging.getLogger('analysis.webservice.algorithms.doms.insitusubset.query_edge')
+    log = logging.getLogger('webservice.algorithms.doms.insitusubset.query_edge')
     try:
         startTime = datetime.utcfromtimestamp(startTime).strftime('%Y-%m-%dT%H:%M:%SZ')
     except TypeError:
diff --git a/analysis/webservice/algorithms_spark/Matchup.py b/analysis/webservice/algorithms_spark/Matchup.py
index 8281353..b3420f3 100644
--- a/analysis/webservice/algorithms_spark/Matchup.py
+++ b/analysis/webservice/algorithms_spark/Matchup.py
@@ -957,7 +957,7 @@ def match_satellite_to_insitu(tile_ids, primary_b, secondary_b, parameter_b, tt_
 def match_tile_to_point_generator(tile_service, tile_id, m_tree, edge_results, search_domain_bounding_wkt,
                                   search_parameter, radius_tolerance, aeqd_proj):
     from nexustiles.model.nexusmodel import NexusPoint
-    from analysis.webservice.algorithms_spark.Matchup import DomsPoint  # Must import DomsPoint or Spark complains
+    from webservice.algorithms_spark.Matchup import DomsPoint  # Must import DomsPoint or Spark complains
 
     # Load tile
     try:
diff --git a/analysis/webservice/algorithms_spark/MatchupDoms.py b/analysis/webservice/algorithms_spark/MatchupDoms.py
index 0a33937..bd3971e 100644
--- a/analysis/webservice/algorithms_spark/MatchupDoms.py
+++ b/analysis/webservice/algorithms_spark/MatchupDoms.py
@@ -739,7 +739,7 @@ def match_satellite_to_insitu(tile_ids, primary_b, secondary_b, parameter_b, tt_
 def match_tile_to_point_generator(tile_service, tile_id, m_tree, edge_results, search_domain_bounding_wkt,
                                   search_parameter, radius_tolerance, aeqd_proj):
     from nexustiles.model.nexusmodel import NexusPoint
-    from analysis.webservice.algorithms_spark.MatchupDoms import DomsPoint  # Must import DomsPoint or Spark complains
+    from webservice.algorithms_spark.MatchupDoms import DomsPoint  # Must import DomsPoint or Spark complains
 
     # Load tile
     try:
diff --git a/analysis/webservice/config/web.ini b/analysis/webservice/config/web.ini
index 473059a..8584975 100644
--- a/analysis/webservice/config/web.ini
+++ b/analysis/webservice/config/web.ini
@@ -29,4 +29,4 @@ static_enabled=true
 static_dir=static
 
 [modules]
-module_dirs=analysis.webservice.algorithms,analysis.webservice.algorithms_spark,analysis.webservice.algorithms.doms
\ No newline at end of file
+module_dirs=webservice.algorithms,webservice.algorithms_spark,webservice.algorithms.doms
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/app_builders/HandlerArgsBuilder.py b/analysis/webservice/nexus_tornado/app_builders/HandlerArgsBuilder.py
index da30828..11ca340 100644
--- a/analysis/webservice/nexus_tornado/app_builders/HandlerArgsBuilder.py
+++ b/analysis/webservice/nexus_tornado/app_builders/HandlerArgsBuilder.py
@@ -34,17 +34,17 @@ class HandlerArgsBuilder:
     @staticmethod
     def handler_needs_algorithm_config(class_wrapper):
         return (
-                class_wrapper == analysis.webservice.algorithms_spark.Matchup.Matchup
-                or class_wrapper == analysis.webservice.algorithms_spark.MatchupDoms.MatchupDoms
-                or issubclass(class_wrapper, analysis.webservice.algorithms.doms.BaseDomsHandler.BaseDomsQueryCalcHandler)
+                class_wrapper == webservice.algorithms_spark.Matchup.Matchup
+                or class_wrapper == webservice.algorithms_spark.MatchupDoms.MatchupDoms
+                or issubclass(class_wrapper, webservice.algorithms.doms.BaseDomsHandler.BaseDomsQueryCalcHandler)
                 or issubclass(class_wrapper,
-                              analysis.webservice.algorithms_spark.NexusCalcSparkTornadoHandler.NexusCalcSparkTornadoHandler)
-                or class_wrapper == analysis.webservice.algorithms.doms.ResultsRetrieval.DomsResultsRetrievalHandler
+                              webservice.algorithms_spark.NexusCalcSparkTornadoHandler.NexusCalcSparkTornadoHandler)
+                or class_wrapper == webservice.algorithms.doms.ResultsRetrieval.DomsResultsRetrievalHandler
         )
 
     @staticmethod
     def handler_needs_remote_collections(class_wrapper):
-        return class_wrapper == analysis.webservice.algorithms.DataSeriesList.DataSeriesListCalcHandlerImpl
+        return class_wrapper == webservice.algorithms.DataSeriesList.DataSeriesListCalcHandlerImpl
 
     def get_args(self, clazz_wrapper):
         args = dict(
@@ -53,7 +53,7 @@ class HandlerArgsBuilder:
             thread_pool=self.request_thread_pool
         )
 
-        if issubclass(clazz_wrapper, analysis.webservice.algorithms_spark.NexusCalcSparkHandler.NexusCalcSparkHandler):
+        if issubclass(clazz_wrapper, webservice.algorithms_spark.NexusCalcSparkHandler.NexusCalcSparkHandler):
             args['sc'] = SparkContextBuilder.get_spark_context()
 
         if self.handler_needs_algorithm_config(clazz_wrapper):
diff --git a/analysis/webservice/nexus_tornado/app_builders/NexusAppBuilder.py b/analysis/webservice/nexus_tornado/app_builders/NexusAppBuilder.py
index 3c6a602..cc0d341 100644
--- a/analysis/webservice/nexus_tornado/app_builders/NexusAppBuilder.py
+++ b/analysis/webservice/nexus_tornado/app_builders/NexusAppBuilder.py
@@ -38,7 +38,7 @@ class NexusAppBuilder:
         self.handlers.append(
             (r'/apidocs', tornado.web.RedirectHandler, {"url": "/apidocs/"}))
 
-        apidocs_path = pkg_resources.resource_filename('analysis.webservice.apidocs', '')
+        apidocs_path = pkg_resources.resource_filename('webservice.apidocs', '')
         self.handlers.append(
             (
                 r'/apidocs/(.*)', tornado.web.StaticFileHandler,
diff --git a/analysis/webservice/nexus_tornado/app_builders/SparkContextBuilder.py b/analysis/webservice/nexus_tornado/app_builders/SparkContextBuilder.py
index af1ad39..5daf279 100644
--- a/analysis/webservice/nexus_tornado/app_builders/SparkContextBuilder.py
+++ b/analysis/webservice/nexus_tornado/app_builders/SparkContextBuilder.py
@@ -26,7 +26,7 @@ class SparkContextBuilder:
         if cls.spark_context is None:
             from pyspark.sql import SparkSession
 
-            scheduler_path = pkg_resources.resource_filename('analysis.webservice', "config/scheduler.xml")
+            scheduler_path = pkg_resources.resource_filename('webservice', "config/scheduler.xml")
 
             spark = SparkSession.builder.appName("nexus-analysis").config(
                 "spark.scheduler.allocation.file", scheduler_path
diff --git a/tools/doms-data-tools/update_values_type.py b/tools/doms-data-tools/update_values_type.py
index 7193e47..36810f1 100644
--- a/tools/doms-data-tools/update_values_type.py
+++ b/tools/doms-data-tools/update_values_type.py
@@ -26,7 +26,7 @@ from cassandra.policies import (DCAwareRoundRobinPolicy, TokenAwarePolicy,
                                 WhiteListRoundRobinPolicy)
 
 try:
-    logging.getLogger('analysis.webservice.NexusHandler').setLevel(logging.CRITICAL)
+    logging.getLogger('webservice.NexusHandler').setLevel(logging.CRITICAL)
 except:
     pass
 
diff --git a/tools/domspurge/purge.py b/tools/domspurge/purge.py
index d1d6485..d4bb15a 100644
--- a/tools/domspurge/purge.py
+++ b/tools/domspurge/purge.py
@@ -30,8 +30,8 @@ from six.moves import input
 from tqdm import tqdm
 
 try:
-    logging.getLogger('analysis.webservice.NexusHandler').setLevel(logging.CRITICAL)
-    from analysis.webservice.algorithms.doms.DomsInitialization import DomsInitializer
+    logging.getLogger('webservice.NexusHandler').setLevel(logging.CRITICAL)
+    from webservice.algorithms.doms.DomsInitialization import DomsInitializer
 except ImportError:
     from DomsInitialization import DomsInitializer
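
Editor's note: a minimal sketch of the import style this revert restores, assuming the repository's analysis/ directory is on sys.path so that webservice resolves as a top-level package (module and class names are taken from the diff above; nothing else is implied about the runtime environment):

    import logging

    # Logger names follow the shortened module path restored by this revert
    # (see the insitusubset.py hunk above).
    log = logging.getLogger('webservice.algorithms.doms.insitusubset.query_edge')

    # Spark worker-side import uses the same shortened path
    # (see the Matchup.py hunk above).
    from webservice.algorithms_spark.Matchup import DomsPoint

Under the pre-revert naming these would instead have used the analysis.webservice.* prefix, as shown in the removed (-) lines of the diff.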