Posted to commits@sdap.apache.org by tl...@apache.org on 2020/07/09 01:28:06 UTC

[incubator-sdap-nexus] branch master updated: SDAP-249 : add solr time out client argument to webapp, use it in the data acces… (#101)

This is an automated email from the ASF dual-hosted git repository.

tloubrieu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git


The following commit(s) were added to refs/heads/master by this push:
     new f77b0f3  SDAP-249 : add solr time out client argument to webapp, use it in the data acces… (#101)
f77b0f3 is described below

commit f77b0f3427372473146853ef592de0ea23548655
Author: thomas loubrieu <60...@users.noreply.github.com>
AuthorDate: Wed Jul 8 18:27:59 2020 -0700

    SDAP-249 : add solr time out client argument to webapp, use it in the data acces… (#101)
    
    
    * add solr time out client argument to webapp, use it in the data access configuration
    
    * add corrections after review (mostly new unit test)
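
    For example, the new option can be passed on the command line when
    starting the webapp (an illustrative invocation; the flag name comes
    from the tornado define('solr_time_out', ...) added in webapp.py):

        python analysis/webservice/webapp.py --solr_time_out=120

    The value is injected into the [solr] section of the algorithm
    configuration as option time_out and handed to the pysolr client.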
    
    Co-authored-by: thomas loubrieu <th...@jpl.nasa.gov>
---
 .../tests/algorithms/longitudelatitudemap_test.py  |  6 +-
 analysis/tests/config/algorithms.ini               |  5 ++
 analysis/tests/webapp_test.py                      | 69 ++++++++++++++++++++++
 analysis/webservice/NexusHandler.py                | 16 ++++-
 analysis/webservice/algorithms/ColorBarHandler.py  |  4 +-
 analysis/webservice/algorithms/CorrelationMap.py   |  4 +-
 .../algorithms/DailyDifferenceAverage.py           |  2 +-
 .../webservice/algorithms/DataInBoundsSearch.py    |  4 +-
 analysis/webservice/algorithms/DataSeriesList.py   |  2 +-
 analysis/webservice/algorithms/Heartbeat.py        |  2 +-
 analysis/webservice/algorithms/HofMoeller.py       |  4 +-
 .../webservice/algorithms/LongitudeLatitudeMap.py  |  2 +-
 analysis/webservice/algorithms/MapFetchHandler.py  | 12 ++--
 .../algorithms/StandardDeviationSearch.py          |  4 +-
 analysis/webservice/algorithms/TileSearch.py       |  2 +-
 analysis/webservice/algorithms/TimeAvgMap.py       |  6 +-
 analysis/webservice/algorithms/TimeSeries.py       | 12 ++--
 analysis/webservice/algorithms/TimeSeriesSolr.py   |  2 +-
 .../webservice/algorithms/doms/DatasetListQuery.py |  2 +-
 .../webservice/algorithms/doms/MatchupQuery.py     |  4 +-
 .../webservice/algorithms_spark/CorrMapSpark.py    |  2 +-
 .../DailyDifferenceAverageSpark.py                 |  2 +-
 .../webservice/algorithms_spark/HofMoellerSpark.py |  4 +-
 analysis/webservice/algorithms_spark/Matchup.py    |  8 +--
 .../webservice/algorithms_spark/TimeAvgMapSpark.py |  2 +-
 .../webservice/algorithms_spark/TimeSeriesSpark.py | 12 ++--
 .../webservice/algorithms_spark/VarianceSpark.py   |  2 +-
 analysis/webservice/webapp.py                      | 26 +++++++-
 data-access/nexustiles/dao/SolrProxy.py            |  7 ++-
 data-access/nexustiles/nexustiles.py               | 19 ++++--
 30 files changed, 184 insertions(+), 64 deletions(-)

diff --git a/analysis/tests/algorithms/longitudelatitudemap_test.py b/analysis/tests/algorithms/longitudelatitudemap_test.py
index b47ce70..3c2475b 100644
--- a/analysis/tests/algorithms/longitudelatitudemap_test.py
+++ b/analysis/tests/algorithms/longitudelatitudemap_test.py
@@ -18,14 +18,14 @@ import time
 import unittest
 from multiprocessing.pool import ThreadPool
 
-from NexusHandler import AlgorithmModuleWrapper
+from webservice.NexusHandler import AlgorithmModuleWrapper
 from mock import MagicMock
 from nexustiles.nexustiles import NexusTileService
 from shapely.geometry import box
 from tornado.testing import bind_unused_port
 from tornado.web import Application
-from webapp import ModularNexusHandlerWrapper
-from webmodel import NexusRequestObject
+from webservice.webapp import ModularNexusHandlerWrapper
+from webservice.webmodel import NexusRequestObject
 
 from webservice.algorithms import LongitudeLatitudeMap
 
diff --git a/analysis/tests/config/algorithms.ini b/analysis/tests/config/algorithms.ini
new file mode 100644
index 0000000..fae7ae5
--- /dev/null
+++ b/analysis/tests/config/algorithms.ini
@@ -0,0 +1,5 @@
+[multiprocessing]
+maxprocesses=8
+
+[spark]
+maxconcurrentjobs=10
\ No newline at end of file
diff --git a/analysis/tests/webapp_test.py b/analysis/tests/webapp_test.py
new file mode 100644
index 0000000..2f9a251
--- /dev/null
+++ b/analysis/tests/webapp_test.py
@@ -0,0 +1,69 @@
+import unittest
+import pkg_resources
+import ConfigParser
+import sys
+import logging
+import mock
+from webservice.webapp import inject_args_in_config
+
+logging.basicConfig(
+        level=logging.DEBUG,
+        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+        datefmt="%Y-%m-%dT%H:%M:%S", stream=sys.stdout)
+
+log = logging.getLogger(__name__)
+
+
+class MyTestCase(unittest.TestCase):
+
+    @mock.patch('tornado.options._Option')
+    @mock.patch('tornado.options')
+    def test_inject_args_in_config_nominal_newoption(self, mock_options, mock_option):
+
+        mock_option.name = 'solr_time_out'
+        mock_option.value.return_value = '60'
+        mock_options._options = {'solr-time-out': mock_option}
+
+        algorithm_config = ConfigParser.RawConfigParser()
+        algorithm_config.readfp(pkg_resources.resource_stream(__name__, "config/algorithms.ini"),
+                                filename='algorithms.ini')
+
+        inject_args_in_config(mock_options, algorithm_config)
+
+        self.assertEqual('60', algorithm_config.get('solr', 'time_out'))
+
+    @mock.patch('tornado.options._Option')
+    @mock.patch('tornado.options')
+    def test_inject_args_in_config_nominal_existingoption(self, mock_options, mock_option):
+        mock_option.name = 'multiprocessing_maxprocesses'
+        mock_option.value.return_value = '60'
+        mock_options._options = {'multiprocessing-maxprocesses': mock_option}
+
+        algorithm_config = ConfigParser.RawConfigParser()
+        algorithm_config.readfp(pkg_resources.resource_stream(__name__, "config/algorithms.ini"),
+                                filename='algorithms.ini')
+
+        inject_args_in_config(mock_options, algorithm_config)
+
+        self.assertEqual('60', algorithm_config.get('multiprocessing', 'maxprocesses'))
+
+
+    @mock.patch('tornado.options._Option')
+    @mock.patch('tornado.options')
+    def test_inject_args_in_config_nosection(self, mock_options, mock_option):
+        mock_option.name = 'port'
+        mock_option.value.return_value = '8080'
+        mock_options._options = {'port': mock_option}
+
+        algorithm_config = ConfigParser.RawConfigParser()
+        algorithm_config.readfp(pkg_resources.resource_stream(__name__, "config/algorithms.ini"),
+                                filename='algorithms.ini')
+
+        inject_args_in_config(mock_options, algorithm_config)
+
+        # nothing should happen; we just check that there is no section named after the option
+        self.assertEqual(False, algorithm_config.has_section('port'))
+
+
+if __name__ == '__main__':
+    unittest.main()
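
The tests above mock tornado's option objects directly; since the module
guards unittest.main() under __main__, it can also be run on its own
(assuming mock, tornado and the webservice package are importable):

    python analysis/tests/webapp_test.py
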
diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/NexusHandler.py
index 87d2af7..1fc035d 100644
--- a/analysis/webservice/NexusHandler.py
+++ b/analysis/webservice/NexusHandler.py
@@ -211,11 +211,21 @@ class NexusHandler(CalcHandler):
         CalcHandler.__init__(self)
 
         self.algorithm_config = None
-        self._tile_service = NexusTileService(skipCassandra, skipSolr)
+        self._skipCassandra = skipCassandra
+        self._skipSolr = skipSolr
+        self.__tile_service = None  # instantiate the tile service after config is fully loaded
 
     def set_config(self, algorithm_config):
         self.algorithm_config = algorithm_config
 
+    def _get_tile_service(self):
+        if self.__tile_service is None:
+            self.__tile_service = NexusTileService(skipDatastore=self._skipCassandra,
+                                                   skipMetadatastore=self._skipSolr,
+                                                   config=self.algorithm_config)
+        return self.__tile_service
+
+
     def _mergeDicts(self, x, y):
         z = x.copy()
         z.update(y)
@@ -377,7 +387,7 @@ class SparkHandler(NexusHandler):
             ds = self._ds
 
         # See what time stamps are in the specified range.
-        t_in_range = self._tile_service.find_days_in_range_asc(self._minLat,
+        t_in_range = self._get_tile_service().find_days_in_range_asc(self._minLat,
                                                                self._maxLat,
                                                                self._minLon,
                                                                self._maxLon,
@@ -393,7 +403,7 @@ class SparkHandler(NexusHandler):
         # Check one time stamp at a time and attempt to extract the global
         # tile set.
         for t in t_in_range:
-            nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
+            nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
                                                                       self._maxLon, ds=ds, start_time=t, end_time=t,
                                                                       metrics_callback=metrics_callback)
             if self._set_info_from_tile_set(nexus_tiles):
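
The substantive change above is that NexusHandler no longer constructs the
NexusTileService in __init__; construction is deferred to the first call of
_get_tile_service(), after set_config() has stored the (possibly
command-line-augmented) algorithm configuration. Distilled from the diff,
the lazy-initialization pattern is:

    class Handler(object):
        def __init__(self):
            self.algorithm_config = None
            self.__tile_service = None  # created lazily, after set_config()

        def _get_tile_service(self):
            if self.__tile_service is None:
                self.__tile_service = NexusTileService(config=self.algorithm_config)
            return self.__tile_service

The remaining files in this commit update every call site from
self._tile_service to self._get_tile_service().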
diff --git a/analysis/webservice/algorithms/ColorBarHandler.py b/analysis/webservice/algorithms/ColorBarHandler.py
index ddb145c..52429c6 100644
--- a/analysis/webservice/algorithms/ColorBarHandler.py
+++ b/analysis/webservice/algorithms/ColorBarHandler.py
@@ -65,10 +65,10 @@ class ColorBarHandler(BaseHandler):
         dataTimeStart = dataTime - 86400.0  # computeOptions.get_datetime_arg("t", None)
         dataTimeEnd = dataTime
 
-        daysinrange = self._tile_service.find_days_in_range_asc(-90.0, 90.0, -180.0, 180.0, ds, dataTimeStart,
+        daysinrange = self._get_tile_service().find_days_in_range_asc(-90.0, 90.0, -180.0, 180.0, ds, dataTimeStart,
                                                                 dataTimeEnd)
 
-        ds1_nexus_tiles = self._tile_service.get_tiles_bounded_by_box_at_time(-90.0, 90.0, -180.0, 180.0,
+        ds1_nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box_at_time(-90.0, 90.0, -180.0, 180.0,
                                                                               ds,
                                                                               daysinrange[0])
 
diff --git a/analysis/webservice/algorithms/CorrelationMap.py b/analysis/webservice/algorithms/CorrelationMap.py
index ac6e19d..7f864a5 100644
--- a/analysis/webservice/algorithms/CorrelationMap.py
+++ b/analysis/webservice/algorithms/CorrelationMap.py
@@ -56,9 +56,9 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler):
         if not len(ds) == 2:
             raise Exception("Requires two datasets for comparison. Specify request parameter ds=Dataset_1,Dataset_2")
 
-        ds1tiles = self._tile_service.find_tiles_in_polygon(box(minLon, minLat, maxLon, maxLat), ds[0], startTime,
+        ds1tiles = self._get_tile_service().find_tiles_in_polygon(box(minLon, minLat, maxLon, maxLat), ds[0], startTime,
                                                             endTime)
-        ds2tiles = self._tile_service.find_tiles_in_polygon(box(minLon, minLat, maxLon, maxLat), ds[1], startTime,
+        ds2tiles = self._get_tile_service().find_tiles_in_polygon(box(minLon, minLat, maxLon, maxLat), ds[1], startTime,
                                                             endTime)
 
         matches = self._match_tiles(ds1tiles, ds2tiles)
diff --git a/analysis/webservice/algorithms/DailyDifferenceAverage.py b/analysis/webservice/algorithms/DailyDifferenceAverage.py
index 802a266..8861d09 100644
--- a/analysis/webservice/algorithms/DailyDifferenceAverage.py
+++ b/analysis/webservice/algorithms/DailyDifferenceAverage.py
@@ -143,7 +143,7 @@ class DailyDifferenceAverageImpl(NexusHandler):
                                              start_time,
                                              end_time):
 
-        daysinrange = self._tile_service.find_days_in_range_asc(min_lat, max_lat, min_lon, max_lon, dataset1,
+        daysinrange = self._get_tile_service().find_days_in_range_asc(min_lat, max_lat, min_lon, max_lon, dataset1,
                                                                 start_time, end_time)
 
         maxprocesses = int(self.algorithm_config.get("multiprocessing", "maxprocesses"))
diff --git a/analysis/webservice/algorithms/DataInBoundsSearch.py b/analysis/webservice/algorithms/DataInBoundsSearch.py
index 80f0416..7c426c7 100644
--- a/analysis/webservice/algorithms/DataInBoundsSearch.py
+++ b/analysis/webservice/algorithms/DataInBoundsSearch.py
@@ -131,10 +131,10 @@ class DataInBoundsSearchHandlerImpl(NexusHandler):
             min_lon = bounding_polygon.bounds[0]
             max_lon = bounding_polygon.bounds[2]
 
-            tiles = self._tile_service.get_tiles_bounded_by_box(min_lat, max_lat, min_lon, max_lon, ds, start_time,
+            tiles = self._get_tile_service().get_tiles_bounded_by_box(min_lat, max_lat, min_lon, max_lon, ds, start_time,
                                                                 end_time)
         else:
-            tiles = self._tile_service.get_tiles_by_metadata(metadata_filter, ds, start_time, end_time)
+            tiles = self._get_tile_service().get_tiles_by_metadata(metadata_filter, ds, start_time, end_time)
 
         data = []
         for tile in tiles:
diff --git a/analysis/webservice/algorithms/DataSeriesList.py b/analysis/webservice/algorithms/DataSeriesList.py
index 239a4c2..cba5590 100644
--- a/analysis/webservice/algorithms/DataSeriesList.py
+++ b/analysis/webservice/algorithms/DataSeriesList.py
@@ -40,4 +40,4 @@ class DataSeriesListHandlerImpl(NexusHandler):
             def toJson(self):
                 return json.dumps(self.result)
 
-        return SimpleResult(self._tile_service.get_dataseries_list())
+        return SimpleResult(self._get_tile_service().get_dataseries_list())
diff --git a/analysis/webservice/algorithms/Heartbeat.py b/analysis/webservice/algorithms/Heartbeat.py
index e72287c..fd69bfd 100644
--- a/analysis/webservice/algorithms/Heartbeat.py
+++ b/analysis/webservice/algorithms/Heartbeat.py
@@ -31,7 +31,7 @@ class HeartbeatHandlerImpl(NexusHandler):
         NexusHandler.__init__(self, skipCassandra=True)
 
     def calc(self, computeOptions, **args):
-        solrOnline = self._tile_service.pingSolr()
+        solrOnline = self._get_tile_service().pingSolr()
 
         # Not sure how to best check cassandra cluster status so just return True for now
         cassOnline = True
diff --git a/analysis/webservice/algorithms/HofMoeller.py b/analysis/webservice/algorithms/HofMoeller.py
index e1fffce..929e03c 100644
--- a/analysis/webservice/algorithms/HofMoeller.py
+++ b/analysis/webservice/algorithms/HofMoeller.py
@@ -128,7 +128,7 @@ class LatitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
         BaseHoffMoellerHandlerImpl.__init__(self)
 
     def calc(self, computeOptions, **args):
-        tiles = self._tile_service.get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
+        tiles = self._get_tile_service().get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
                                                             computeOptions.get_min_lon(), computeOptions.get_max_lon(),
                                                             computeOptions.get_dataset()[0],
                                                             computeOptions.get_start_time(),
@@ -194,7 +194,7 @@ class LongitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
         BaseHoffMoellerHandlerImpl.__init__(self)
 
     def calc(self, computeOptions, **args):
-        tiles = self._tile_service.get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
+        tiles = self._get_tile_service().get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
                                                             computeOptions.get_min_lon(), computeOptions.get_max_lon(),
                                                             computeOptions.get_dataset()[0],
                                                             computeOptions.get_start_time(),
diff --git a/analysis/webservice/algorithms/LongitudeLatitudeMap.py b/analysis/webservice/algorithms/LongitudeLatitudeMap.py
index 80f22dc..3277683 100644
--- a/analysis/webservice/algorithms/LongitudeLatitudeMap.py
+++ b/analysis/webservice/algorithms/LongitudeLatitudeMap.py
@@ -115,7 +115,7 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler):
 
         ds, bounding_polygon, start_seconds_from_epoch, end_seconds_from_epoch = self.parse_arguments(request)
 
-        boxes = self._tile_service.get_distinct_bounding_boxes_in_polygon(bounding_polygon, ds,
+        boxes = self._get_tile_service().get_distinct_bounding_boxes_in_polygon(bounding_polygon, ds,
                                                                           start_seconds_from_epoch,
                                                                           end_seconds_from_epoch)
         point_avg_over_time = lat_lon_map_driver(bounding_polygon, start_seconds_from_epoch, end_seconds_from_epoch, ds,
diff --git a/analysis/webservice/algorithms/MapFetchHandler.py b/analysis/webservice/algorithms/MapFetchHandler.py
index 88e2a1d..9c6be94 100644
--- a/analysis/webservice/algorithms/MapFetchHandler.py
+++ b/analysis/webservice/algorithms/MapFetchHandler.py
@@ -228,13 +228,13 @@ class MapFetchHandler(BaseHandler):
         width = np.min([8192, computeOptions.get_int_arg("width", 1024)])
         height = np.min([8192, computeOptions.get_int_arg("height", 512)])
 
-        stats = self._tile_service.get_dataset_overall_stats(ds)
+        stats = self._get_tile_service().get_dataset_overall_stats(ds)
 
-        daysinrange = self._tile_service.find_days_in_range_asc(-90.0, 90.0, -180.0, 180.0, ds, dataTimeStart,
+        daysinrange = self._get_tile_service().find_days_in_range_asc(-90.0, 90.0, -180.0, 180.0, ds, dataTimeStart,
                                                                 dataTimeEnd)
 
         if len(daysinrange) > 0:
-            ds1_nexus_tiles = self._tile_service.get_tiles_bounded_by_box_at_time(-90.0, 90.0, -180.0, 180.0,
+            ds1_nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box_at_time(-90.0, 90.0, -180.0, 180.0,
                                                                                   ds,
                                                                                   daysinrange[0])
 
@@ -289,9 +289,9 @@ class MapFetchHandler(BaseHandler):
         else:
             time_interval = relativedelta(months=+1)
 
-        stats = self._tile_service.get_dataset_overall_stats(ds)
+        stats = self._get_tile_service().get_dataset_overall_stats(ds)
 
-        start_time, end_time = self._tile_service.get_min_max_time_by_granule(ds, granule_name)
+        start_time, end_time = self._get_tile_service().get_min_max_time_by_granule(ds, granule_name)
 
         MRF.create_all(ds, prefix)
 
@@ -306,7 +306,7 @@ class MapFetchHandler(BaseHandler):
         while start_time <= end_time:
             one_interval_later = start_time + time_interval
             temp_end_time = one_interval_later - relativedelta(minutes=+1)  # prevent getting tiles for 2 intervals
-            ds1_nexus_tiles = self._tile_service.find_tiles_in_box(-90.0, 90.0, -180.0, 180.0, ds, start_time,
+            ds1_nexus_tiles = self._get_tile_service().find_tiles_in_box(-90.0, 90.0, -180.0, 180.0, ds, start_time,
                                                                    temp_end_time)
 
             if ds1_nexus_tiles is not None:
diff --git a/analysis/webservice/algorithms/StandardDeviationSearch.py b/analysis/webservice/algorithms/StandardDeviationSearch.py
index b3182d5..0f924c4 100644
--- a/analysis/webservice/algorithms/StandardDeviationSearch.py
+++ b/analysis/webservice/algorithms/StandardDeviationSearch.py
@@ -114,10 +114,10 @@ class StandardDeviationSearchHandlerImpl(NexusHandler):
         ds, longitude, latitude, day_of_year, return_all = self.parse_arguments(request)
 
         if return_all:
-            func = partial(get_all_std_dev, tile_service=self._tile_service, ds=ds, longitude=longitude,
+            func = partial(get_all_std_dev, tile_service=self._get_tile_service(), ds=ds, longitude=longitude,
                            latitude=latitude, day_of_year=day_of_year)
         else:
-            func = partial(get_single_std_dev, tile_service=self._tile_service, ds=ds, longitude=longitude,
+            func = partial(get_single_std_dev, tile_service=self._get_tile_service(), ds=ds, longitude=longitude,
                            latitude=latitude, day_of_year=day_of_year)
 
         try:
diff --git a/analysis/webservice/algorithms/TileSearch.py b/analysis/webservice/algorithms/TileSearch.py
index 9ee8486..8bfcdbb 100644
--- a/analysis/webservice/algorithms/TileSearch.py
+++ b/analysis/webservice/algorithms/TileSearch.py
@@ -75,7 +75,7 @@ class ChunkSearchHandlerImpl(NexusHandler):
         endTime = computeOptions.get_end_time()
         # TODO update to expect tile objects back
         res = [tile.get_summary() for tile in
-               self._tile_service.find_tiles_in_box(minLat, maxLat, minLon, maxLon, ds, startTime, endTime,
+               self._get_tile_service().find_tiles_in_box(minLat, maxLat, minLon, maxLon, ds, startTime, endTime,
                                                     fetch_data=False)]
 
         res = NexusResults(results=res)
diff --git a/analysis/webservice/algorithms/TimeAvgMap.py b/analysis/webservice/algorithms/TimeAvgMap.py
index 3855f0b..ce8085a 100644
--- a/analysis/webservice/algorithms/TimeAvgMap.py
+++ b/analysis/webservice/algorithms/TimeAvgMap.py
@@ -47,7 +47,7 @@ class TimeAvgMapHandlerImpl(NexusHandler):
         t = self._endTime
         t_incr = 86400
         while ntiles == 0:
-            nexus_tiles = self._tile_service.get_tiles_bounded_by_box(midLat - 0.5, midLat + 0.5, midLon - 0.5,
+            nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(midLat - 0.5, midLat + 0.5, midLon - 0.5,
                                                                       midLon + 0.5, ds=self._ds, start_time=t - t_incr,
                                                                       end_time=t)
             ntiles = len(nexus_tiles)
@@ -78,7 +78,7 @@ class TimeAvgMapHandlerImpl(NexusHandler):
         t = self._endTime
         t_incr = 86400
         while ntiles == 0:
-            nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
+            nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
                                                                       self._maxLon, ds=self._ds, start_time=t - t_incr,
                                                                       end_time=t)
             ntiles = len(nexus_tiles)
@@ -126,7 +126,7 @@ class TimeAvgMapHandlerImpl(NexusHandler):
                 t1 = time()
                 print 'nexus call start at time %f' % t1
                 sys.stdout.flush()
-                nexus_tiles = self._tile_service.get_tiles_bounded_by_box(min_lat - self._latRes / 2,
+                nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(min_lat - self._latRes / 2,
                                                                           max_lat + self._latRes / 2,
                                                                           min_lon - self._lonRes / 2,
                                                                           max_lon + self._lonRes / 2, ds=self._ds,
diff --git a/analysis/webservice/algorithms/TimeSeries.py b/analysis/webservice/algorithms/TimeSeries.py
index 429d023..405e567 100644
--- a/analysis/webservice/algorithms/TimeSeries.py
+++ b/analysis/webservice/algorithms/TimeSeries.py
@@ -206,7 +206,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
                                               apply_seasonal_cycle_filter=True, apply_low_pass_filter=True):
 
         the_time = datetime.now()
-        daysinrange = self._tile_service.find_days_in_range_asc(bounding_polygon.bounds[1],
+        daysinrange = self._get_tile_service().find_days_in_range_asc(bounding_polygon.bounds[1],
                                                                 bounding_polygon.bounds[3],
                                                                 bounding_polygon.bounds[0],
                                                                 bounding_polygon.bounds[2],
@@ -310,7 +310,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
             end_of_month = datetime(year, month, calendar.monthrange(year, month)[1], 23, 59, 59)
             start = (pytz.UTC.localize(beginning_of_month) - EPOCH).total_seconds()
             end = (pytz.UTC.localize(end_of_month) - EPOCH).total_seconds()
-            tile_stats = self._tile_service.find_tiles_in_polygon(bounding_polygon, ds, start, end,
+            tile_stats = self._get_tile_service().find_tiles_in_polygon(bounding_polygon, ds, start, end,
                                                                   fl=('id,'
                                                                       'tile_avg_val_d,tile_count_i,'
                                                                       'tile_min_val_d,tile_max_val_d,'
@@ -336,8 +336,8 @@ class TimeSeriesHandlerImpl(NexusHandler):
             tile_counts = [tile.tile_stats.count for tile in inner_tiles]
 
             # Border tiles need to have the data loaded, masked, and stats recalculated
-            border_tiles = list(self._tile_service.fetch_data_for_tiles(*border_tiles))
-            border_tiles = self._tile_service.mask_tiles_to_polygon(bounding_polygon, border_tiles)
+            border_tiles = list(self._get_tile_service().fetch_data_for_tiles(*border_tiles))
+            border_tiles = self._get_tile_service().mask_tiles_to_polygon(bounding_polygon, border_tiles)
             for tile in border_tiles:
                 tile.update_stats()
                 tile_means.append(tile.tile_stats.mean)
@@ -367,9 +367,9 @@ class TimeSeriesHandlerImpl(NexusHandler):
     @lru_cache()
     def get_min_max_date(self, ds=None):
         min_date = pytz.timezone('UTC').localize(
-            datetime.utcfromtimestamp(self._tile_service.get_min_time([], ds=ds)))
+            datetime.utcfromtimestamp(self._get_tile_service().get_min_time([], ds=ds)))
         max_date = pytz.timezone('UTC').localize(
-            datetime.utcfromtimestamp(self._tile_service.get_max_time([], ds=ds)))
+            datetime.utcfromtimestamp(self._get_tile_service().get_max_time([], ds=ds)))
 
         return min_date.date(), max_date.date()
 
diff --git a/analysis/webservice/algorithms/TimeSeriesSolr.py b/analysis/webservice/algorithms/TimeSeriesSolr.py
index a5d2c21..2cc8d37 100644
--- a/analysis/webservice/algorithms/TimeSeriesSolr.py
+++ b/analysis/webservice/algorithms/TimeSeriesSolr.py
@@ -98,7 +98,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
     def getTimeSeriesStatsForBoxSingleDataSet(self, min_lat, max_lat, min_lon, max_lon, ds, start_time=0, end_time=-1,
                                               applySeasonalFilter=True, applyLowPass=True):
 
-        daysinrange = self._tile_service.find_days_in_range_asc(min_lat, max_lat, min_lon, max_lon, ds, start_time,
+        daysinrange = self._get_tile_service().find_days_in_range_asc(min_lat, max_lat, min_lon, max_lon, ds, start_time,
                                                                 end_time)
 
         if len(daysinrange) == 0:
diff --git a/analysis/webservice/algorithms/doms/DatasetListQuery.py b/analysis/webservice/algorithms/doms/DatasetListQuery.py
index fda647b..7c418c9 100644
--- a/analysis/webservice/algorithms/doms/DatasetListQuery.py
+++ b/analysis/webservice/algorithms/doms/DatasetListQuery.py
@@ -91,7 +91,7 @@ class DomsDatasetListQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
     @cached(ttl=(60 * 60 * 1000))  # 1 hour cached
     def calc(self, computeOptions, **args):
 
-        satellitesList = self._tile_service.get_dataseries_list(simple=True)
+        satellitesList = self._get_tile_service().get_dataseries_list(simple=True)
 
         insituList = []
 
diff --git a/analysis/webservice/algorithms/doms/MatchupQuery.py b/analysis/webservice/algorithms/doms/MatchupQuery.py
index 15e8286..cff23a0 100644
--- a/analysis/webservice/algorithms/doms/MatchupQuery.py
+++ b/analysis/webservice/algorithms/doms/MatchupQuery.py
@@ -109,14 +109,14 @@ class CombinedDomsMatchupQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
                 '''
                 Single Threaded at the moment...
                 '''
-                daysinrange = self._tile_service.find_days_in_range_asc(bounds.south, bounds.north, bounds.west,
+                daysinrange = self._get_tile_service().find_days_in_range_asc(bounds.south, bounds.north, bounds.west,
                                                                         bounds.east, matchupId,
                                                                         self.__parseDatetime(startTime) / 1000,
                                                                         self.__parseDatetime(endTime) / 1000)
 
                 tilesByDay = {}
                 for dayTimestamp in daysinrange:
-                    ds1_nexus_tiles = self._tile_service.get_tiles_bounded_by_box_at_time(bounds.south, bounds.north,
+                    ds1_nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box_at_time(bounds.south, bounds.north,
                                                                                           bounds.west, bounds.east,
                                                                                           matchupId, dayTimestamp)
 
diff --git a/analysis/webservice/algorithms_spark/CorrMapSpark.py b/analysis/webservice/algorithms_spark/CorrMapSpark.py
index 9503298..6627536 100644
--- a/analysis/webservice/algorithms_spark/CorrMapSpark.py
+++ b/analysis/webservice/algorithms_spark/CorrMapSpark.py
@@ -198,7 +198,7 @@ class CorrMapSparkHandlerImpl(SparkHandler):
         self.log.debug('Using Native resolution: lat_res={0}, lon_res={1}'.format(self._latRes, self._lonRes))
         self.log.debug('nlats={0}, nlons={1}'.format(self._nlats, self._nlons))
 
-        daysinrange = self._tile_service.find_days_in_range_asc(self._minLat,
+        daysinrange = self._get_tile_service().find_days_in_range_asc(self._minLat,
                                                                 self._maxLat,
                                                                 self._minLon,
                                                                 self._maxLon,
diff --git a/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py b/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
index 2bc511e..d164532 100644
--- a/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
+++ b/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
@@ -132,7 +132,7 @@ class DailyDifferenceAverageSparkImpl(SparkHandler):
         self.log.debug("Querying for tiles in search domain")
         # Get tile ids in box
         tile_ids = [tile.tile_id for tile in
-                    self._tile_service.find_tiles_in_polygon(bounding_polygon, dataset,
+                    self._get_tile_service().find_tiles_in_polygon(bounding_polygon, dataset,
                                                              start_seconds_from_epoch, end_seconds_from_epoch,
                                                              fetch_data=False, fl='id',
                                                              sort=['tile_min_time_dt asc', 'tile_min_lon asc',
diff --git a/analysis/webservice/algorithms_spark/HofMoellerSpark.py b/analysis/webservice/algorithms_spark/HofMoellerSpark.py
index 5fd01ba..12320b1 100644
--- a/analysis/webservice/algorithms_spark/HofMoellerSpark.py
+++ b/analysis/webservice/algorithms_spark/HofMoellerSpark.py
@@ -351,7 +351,7 @@ class LatitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerHandlerImpl):
         min_lon, min_lat, max_lon, max_lat = bbox.bounds
 
         nexus_tiles_spark = [(self._latlon, tile.tile_id, x, min_lat, max_lat, min_lon, max_lon) for x, tile in
-                             enumerate(self._tile_service.find_tiles_in_box(min_lat, max_lat, min_lon, max_lon,
+                             enumerate(self._get_tile_service().find_tiles_in_box(min_lat, max_lat, min_lon, max_lon,
                                                                             ds, start_time, end_time,
                                                                             metrics_callback=metrics_record.record_metrics,
                                                                             fetch_data=False))]
@@ -402,7 +402,7 @@ class LongitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerHandlerImpl):
         min_lon, min_lat, max_lon, max_lat = bbox.bounds
 
         nexus_tiles_spark = [(self._latlon, tile.tile_id, x, min_lat, max_lat, min_lon, max_lon) for x, tile in
-                             enumerate(self._tile_service.find_tiles_in_box(min_lat, max_lat, min_lon, max_lon,
+                             enumerate(self._get_tile_service().find_tiles_in_box(min_lat, max_lat, min_lon, max_lon,
                                                                             ds, start_time, end_time,
                                                                             metrics_callback=metrics_record.record_metrics,
                                                                             fetch_data=False))]
diff --git a/analysis/webservice/algorithms_spark/Matchup.py b/analysis/webservice/algorithms_spark/Matchup.py
index b6bf95c..17d14ec 100644
--- a/analysis/webservice/algorithms_spark/Matchup.py
+++ b/analysis/webservice/algorithms_spark/Matchup.py
@@ -31,7 +31,7 @@ from scipy import spatial
 from shapely import wkt
 from shapely.geometry import Point
 from shapely.geometry import box
-from shapely.geos import ReadingError
+from shapely.geos import WKTReadingError
 
 from webservice.NexusHandler import SparkHandler, nexus_handler
 from webservice.algorithms.doms import config as edge_endpoints
@@ -220,7 +220,7 @@ class Matchup(SparkHandler):
         self.log.debug("Querying for tiles in search domain")
         # Get tile ids in box
         tile_ids = [tile.tile_id for tile in
-                    self._tile_service.find_tiles_in_polygon(bounding_polygon, primary_ds_name,
+                    self._get_tile_service().find_tiles_in_polygon(bounding_polygon, primary_ds_name,
                                                              start_seconds_from_epoch, end_seconds_from_epoch,
                                                              fetch_data=False, fl='id',
                                                              sort=['tile_min_time_dt asc', 'tile_min_lon asc',
@@ -400,7 +400,7 @@ class DomsPoint(object):
 
         try:
             x, y = wkt.loads(edge_point['point']).coords[0]
-        except ReadingError:
+        except WKTReadingError:
             try:
                 x, y = Point(*[float(c) for c in edge_point['point'].split(' ')]).coords[0]
             except ValueError:
@@ -580,7 +580,7 @@ def match_satellite_to_insitu(tile_ids, primary_b, matchup_b, parameter_b, tt_b,
     for n, edge_point in enumerate(edge_results):
         try:
             x, y = wkt.loads(edge_point['point']).coords[0]
-        except ReadingError:
+        except WKTReadingError:
             try:
                 x, y = Point(*[float(c) for c in edge_point['point'].split(' ')]).coords[0]
             except ValueError:
diff --git a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
index b1d11d4..ca430eb 100644
--- a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
@@ -162,7 +162,7 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
         print('Found {} tiles'.format(len(nexus_tiles)))
 
-        daysinrange = self._tile_service.find_days_in_range_asc(bbox.bounds[1],
+        daysinrange = self._get_tile_service().find_days_in_range_asc(bbox.bounds[1],
                                                                 bbox.bounds[3],
                                                                 bbox.bounds[0],
                                                                 bbox.bounds[2],
diff --git a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
index 07b95f2..20f989a 100644
--- a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
@@ -178,7 +178,7 @@ class TimeSeriesHandlerImpl(SparkHandler):
         for shortName in ds:
 
             the_time = datetime.now()
-            daysinrange = self._tile_service.find_days_in_range_asc(bounding_polygon.bounds[1],
+            daysinrange = self._get_tile_service().find_days_in_range_asc(bounding_polygon.bounds[1],
                                                                     bounding_polygon.bounds[3],
                                                                     bounding_polygon.bounds[0],
                                                                     bounding_polygon.bounds[2],
@@ -287,7 +287,7 @@ class TimeSeriesHandlerImpl(SparkHandler):
             end_of_month = datetime(year, month, calendar.monthrange(year, month)[1], 23, 59, 59)
             start = (pytz.UTC.localize(beginning_of_month) - EPOCH).total_seconds()
             end = (pytz.UTC.localize(end_of_month) - EPOCH).total_seconds()
-            tile_stats = self._tile_service.find_tiles_in_polygon(bounding_polygon, ds, start, end,
+            tile_stats = self._get_tile_service().find_tiles_in_polygon(bounding_polygon, ds, start, end,
                                                                   fl=('id,'
                                                                       'tile_avg_val_d,tile_count_i,'
                                                                       'tile_min_val_d,tile_max_val_d,'
@@ -313,8 +313,8 @@ class TimeSeriesHandlerImpl(SparkHandler):
             tile_counts = [tile.tile_stats.count for tile in inner_tiles]
 
             # Border tiles need to have the data loaded, masked, and stats recalculated
-            border_tiles = list(self._tile_service.fetch_data_for_tiles(*border_tiles))
-            border_tiles = self._tile_service.mask_tiles_to_polygon(bounding_polygon, border_tiles)
+            border_tiles = list(self._get_tile_service().fetch_data_for_tiles(*border_tiles))
+            border_tiles = self._get_tile_service().mask_tiles_to_polygon(bounding_polygon, border_tiles)
             for tile in border_tiles:
                 tile.update_stats()
                 tile_means.append(tile.tile_stats.mean)
@@ -344,9 +344,9 @@ class TimeSeriesHandlerImpl(SparkHandler):
     @lru_cache()
     def get_min_max_date(self, ds=None):
         min_date = pytz.timezone('UTC').localize(
-            datetime.utcfromtimestamp(self._tile_service.get_min_time([], ds=ds)))
+            datetime.utcfromtimestamp(self._get_tile_service().get_min_time([], ds=ds)))
         max_date = pytz.timezone('UTC').localize(
-            datetime.utcfromtimestamp(self._tile_service.get_max_time([], ds=ds)))
+            datetime.utcfromtimestamp(self._get_tile_service().get_max_time([], ds=ds)))
 
         return min_date.date(), max_date.date()
 
diff --git a/analysis/webservice/algorithms_spark/VarianceSpark.py b/analysis/webservice/algorithms_spark/VarianceSpark.py
index 07b055e..8c96cb7 100644
--- a/analysis/webservice/algorithms_spark/VarianceSpark.py
+++ b/analysis/webservice/algorithms_spark/VarianceSpark.py
@@ -161,7 +161,7 @@ class VarianceSparkHandlerImpl(SparkHandler):
         self.log.debug('Found {0} tiles'.format(len(nexus_tiles)))
         print('Found {} tiles'.format(len(nexus_tiles)))
 
-        daysinrange = self._tile_service.find_days_in_range_asc(bbox.bounds[1],
+        daysinrange = self._get_tile_service().find_days_in_range_asc(bbox.bounds[1],
                                                                 bbox.bounds[3],
                                                                 bbox.bounds[0],
                                                                 bbox.bounds[2],
diff --git a/analysis/webservice/webapp.py b/analysis/webservice/webapp.py
index ad5c1a7..55792ce 100644
--- a/analysis/webservice/webapp.py
+++ b/analysis/webservice/webapp.py
@@ -25,7 +25,6 @@ import matplotlib
 import pkg_resources
 import tornado.web
 from tornado.options import define, options, parse_command_line
-
 from webservice import NexusHandler
 from webservice.webmodel import NexusRequestObject, NexusProcessingException
 
@@ -156,8 +155,28 @@ class ModularNexusHandlerWrapper(BaseHandler):
         if hasattr(result, 'cleanup'):
             result.cleanup()
 
+def inject_args_in_config(args, config):
+    """
+        Takes command argparse arguments and push them in the config
+         with syntax args.<section>-<option>
+    """
+    log = logging.getLogger(__name__)
+
+    for t_opt in args._options.values():
+        n = t_opt.name
+        first_ = n.find('_')
+        if first_ > 0:
+            s, o = n[:first_], n[first_+1:]
+            v = t_opt.value()
+            log.info('injecting argument {} = {} into configuration section {}, option {}'.format(n, v, s, o))
+            if not config.has_section(s):
+                config.add_section(s)
+            config.set(s, o, v)
+    return config
+
 
 if __name__ == "__main__":
+
     logging.basicConfig(
         level=logging.DEBUG,
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
@@ -174,7 +193,12 @@ if __name__ == "__main__":
     define("debug", default=False, help="run in debug mode")
     define("port", default=webconfig.get("global", "server.socket_port"), help="run on the given port", type=int)
     define("address", default=webconfig.get("global", "server.socket_host"), help="Bind to the given address")
+    define('solr_time_out', default=60,
+           help='timeout for solr requests in seconds; the default (60) is fine for most'
+                ' deployments, but may need to be increased when solr performance is poor')
+
     parse_command_line()
+    algorithm_config = inject_args_in_config(options, algorithm_config)
 
     moduleDirs = webconfig.get("modules", "module_dirs").split(",")
     for moduleDir in moduleDirs:
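
For reference, the section/option naming convention that
inject_args_in_config relies on can be shown in isolation (the option name
and value below are made up for the example): everything before the first
underscore selects the configuration section, the remainder names the
option.

    import ConfigParser

    config = ConfigParser.RawConfigParser()
    name, value = 'solr_time_out', '60'
    section, option = name.split('_', 1)  # -> 'solr', 'time_out'
    if not config.has_section(section):
        config.add_section(section)
    config.set(section, option, value)
    assert config.get('solr', 'time_out') == '60'

Option names without an underscore (e.g. 'port') carry no section prefix
and are skipped, which is what test_inject_args_in_config_nosection checks.
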
diff --git a/data-access/nexustiles/dao/SolrProxy.py b/data-access/nexustiles/dao/SolrProxy.py
index 0a36707..beba557 100644
--- a/data-access/nexustiles/dao/SolrProxy.py
+++ b/data-access/nexustiles/dao/SolrProxy.py
@@ -36,12 +36,17 @@ class SolrProxy(object):
     def __init__(self, config):
         self.solrUrl = config.get("solr", "host")
         self.solrCore = config.get("solr", "core")
+        solr_kwargs = {}
+        if config.has_option("solr", "time_out"):
+            solr_kwargs["timeout"] = config.get("solr", "time_out")
         self.logger = logging.getLogger('nexus')
 
         with SOLR_CON_LOCK:
             solrcon = getattr(thread_local, 'solrcon', None)
             if solrcon is None:
-                solrcon = pysolr.Solr('http://%s/solr/%s' % (self.solrUrl, self.solrCore))
+                solr_url = 'http://%s/solr/%s' % (self.solrUrl, self.solrCore)
+                self.logger.info("connecting to solr at url {} with option(s) {}".format(solr_url, solr_kwargs))
+                solrcon = pysolr.Solr(solr_url, **solr_kwargs)
                 thread_local.solrcon = solrcon
 
             self.solrcon = solrcon
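
With the timeout injected, the effective configuration and the resulting
client construction look roughly as follows (host and core values are
hypothetical; pysolr passes timeout through to the underlying requests
session):

    [solr]
    host=localhost:8983
    core=nexustiles
    time_out=60

    import pysolr
    solrcon = pysolr.Solr('http://localhost:8983/solr/nexustiles', timeout=60)

Note that values read back from an .ini file are strings, so
config.getfloat('solr', 'time_out') may be the safer accessor where the
HTTP stack expects a number.
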
diff --git a/data-access/nexustiles/nexustiles.py b/data-access/nexustiles/nexustiles.py
index 71fd0bb..c487059 100644
--- a/data-access/nexustiles/nexustiles.py
+++ b/data-access/nexustiles/nexustiles.py
@@ -80,12 +80,12 @@ class NexusTileService(object):
         self._datastore = None
         self._metadatastore = None
 
-        if config is None:
-            self._config = ConfigParser.RawConfigParser()
-            self._config.readfp(pkg_resources.resource_stream(__name__, "config/datastores.ini"),
-                                filename='datastores.ini')
-        else:
-            self._config = config
+        self._config = ConfigParser.RawConfigParser()
+        self._config.readfp(pkg_resources.resource_stream(__name__, "config/datastores.ini"),
+                            filename='datastores.ini')
+        if config:
+            self.override_config(config)
+
 
         if not skipDatastore:
             datastore = self._config.get("datastore", "store")
@@ -101,6 +101,13 @@ class NexusTileService(object):
         if not skipMetadatastore:
             self._metadatastore = dao.SolrProxy.SolrProxy(self._config)
 
+    def override_config(self, config):
+        for section in config.sections():
+            if self._config.has_section(section):  # only override preexisting sections; ignore the others
+                for option in config.options(section):
+                    self._config.set(section, option, config.get(section, option))
+
+
     def get_dataseries_list(self, simple=False):
         if simple:
             return self._metadatastore.get_data_series_list_simple()
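
The override semantics introduced here: options from the caller's config
replace values in the packaged datastores.ini only for sections that
already exist there; unknown sections are ignored. A self-contained
illustration (section and option names are examples only):

    import ConfigParser

    base = ConfigParser.RawConfigParser()
    base.add_section('solr')
    base.set('solr', 'time_out', '30')

    override = ConfigParser.RawConfigParser()
    override.add_section('solr')
    override.set('solr', 'time_out', '60')
    override.add_section('unknown')  # not present in base
    override.set('unknown', 'opt', 'x')

    # same loop as NexusTileService.override_config
    for section in override.sections():
        if base.has_section(section):
            for option in override.options(section):
                base.set(section, option, override.get(section, option))

    assert base.get('solr', 'time_out') == '60'
    assert not base.has_section('unknown')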