You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sdap.apache.org by tl...@apache.org on 2020/07/10 19:47:00 UTC

[incubator-sdap-nexus] branch master updated: Sdap 252 (#104)

This is an automated email from the ASF dual-hosted git repository.

tloubrieu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git


The following commit(s) were added to refs/heads/master by this push:
     new de242e9  Sdap 252 (#104)
de242e9 is described below

commit de242e9d89632ed4c859d32fa7a99549e992be50
Author: thomas loubrieu <60...@users.noreply.github.com>
AuthorDate: Fri Jul 10 12:46:50 2020 -0700

    Sdap 252 (#104)
    
    Remove singleton pattern for algorithm handlers which caused a bug on concurrent requests (SDAP-252), simplifies the handler hierarchy:
    
    remove the singleton pattern for the nexusHandlers, works for 3 spark algorithm so far
    
    remove singleton pattern for list handler (maybe not a good idea...)
    
    simplify class handler hierarchy
    
    split files per objects
    
    simplify class handler hierarchy
    
    add details for developers deployment in README
    
    delete moved package
    
    correct typo in Spark handler class name
    
    add conda dependencies versions, make docker work
    
    update unit test
    
    correct impact of current ticket on unit tests, some still don't work since they require specific data
    
    resolve conflicts between SDAP-252 and SDAP-249
    
    Co-authored-by: thomas loubrieu <th...@jpl.nasa.gov>
---
 .gitignore                                         |   5 +-
 analysis/README.md                                 |  53 ++-
 analysis/__init__.py                               |   0
 analysis/conda-requirements.txt                    |  32 +-
 analysis/setup.py                                  |  24 +-
 .../algorithms/StandardDeviationSearch_test.py     |  38 +-
 .../tests/algorithms/longitudelatitudemap_test.py  |  14 +-
 analysis/webservice/NexusHandler.py                | 505 +-------------------
 analysis/webservice/WorkflowDriver.py              |   4 +-
 analysis/webservice/__init__.py                    |   1 -
 analysis/webservice/algorithms/Capabilities.py     |  15 +-
 analysis/webservice/algorithms/ColorBarHandler.py  |   4 +-
 analysis/webservice/algorithms/CorrelationMap.py   |   7 +-
 .../algorithms/DailyDifferenceAverage.py           |   7 +-
 .../webservice/algorithms/DataInBoundsSearch.py    |   7 +-
 analysis/webservice/algorithms/DataSeriesList.py   |   6 +-
 analysis/webservice/algorithms/DelayTest.py        |   6 +-
 analysis/webservice/algorithms/ErrorTosserTest.py  |   7 +-
 analysis/webservice/algorithms/Heartbeat.py        |   7 +-
 analysis/webservice/algorithms/HofMoeller.py       |  18 +-
 .../webservice/algorithms/LongitudeLatitudeMap.py  |   9 +-
 analysis/webservice/algorithms/MapFetchHandler.py  |  20 +-
 analysis/webservice/algorithms/NexusCalcHandler.py |  77 +++
 .../algorithms/StandardDeviationSearch.py          |   9 +-
 analysis/webservice/algorithms/TileSearch.py       |   6 +-
 analysis/webservice/algorithms/TimeAvgMap.py       |   7 +-
 analysis/webservice/algorithms/TimeSeries.py       |   9 +-
 analysis/webservice/algorithms/TimeSeriesSolr.py   |   7 +-
 .../webservice/algorithms/doms/BaseDomsHandler.py  |   4 +-
 .../webservice/algorithms/doms/DatasetListQuery.py |   4 +-
 .../algorithms/doms/DomsInitialization.py          |  30 +-
 .../webservice/algorithms/doms/MatchupQuery.py     |   4 +-
 .../webservice/algorithms/doms/MetadataQuery.py    |   4 +-
 .../webservice/algorithms/doms/ResultsPlotQuery.py |   4 +-
 .../webservice/algorithms/doms/ResultsRetrieval.py |   4 +-
 analysis/webservice/algorithms/doms/StatsQuery.py  |   4 +-
 analysis/webservice/algorithms/doms/ValuesQuery.py |   4 +-
 .../{domsconfig.ini => domsconfig.ini.default}     |   1 +
 .../webservice/algorithms/doms/histogramplot.py    |   3 +-
 .../webservice/algorithms/doms/insitusubset.py     |   4 +-
 analysis/webservice/algorithms/doms/mapplot.py     |   3 +-
 analysis/webservice/algorithms/doms/scatterplot.py |   3 +-
 analysis/webservice/algorithms/doms/subsetter.py   |   4 +-
 .../webservice/algorithms_spark/ClimMapSpark.py    |  25 +-
 .../webservice/algorithms_spark/CorrMapSpark.py    |  11 +-
 .../DailyDifferenceAverageSpark.py                 |   9 +-
 .../webservice/algorithms_spark/HofMoellerSpark.py |  25 +-
 analysis/webservice/algorithms_spark/Matchup.py    |   9 +-
 .../algorithms_spark/MaximaMinimaSpark.py          |  10 +-
 .../NexusCalcSparkHandler.py}                      | 393 +++-------------
 .../webservice/algorithms_spark/TimeAvgMapSpark.py |  66 +--
 .../webservice/algorithms_spark/TimeSeriesSpark.py |  11 +-
 .../webservice/algorithms_spark/VarianceSpark.py   |   8 +-
 analysis/webservice/algorithms_spark/__init__.py   |  11 +
 analysis/webservice/nexus_tornado/__init__.py      |   0
 .../webservice/nexus_tornado/request/__init__.py   |   0
 .../request/handlers/NexusRequestHandler.py        |  63 +++
 .../nexus_tornado/request/handlers/__init__.py     |   1 +
 .../request/renderers/NexusCSVRenderer.py          |  17 +
 .../request/renderers/NexusJSONRenderer.py         |  19 +
 .../request/renderers/NexusNETCDFRenderer.py       |  17 +
 .../request/renderers/NexusPNGRenderer.py          |  17 +
 .../request/renderers/NexusRendererFactory.py      |  16 +
 .../request/renderers/NexusZIPRenderer.py          |  17 +
 .../nexus_tornado/request/renderers/__init__.py    |   6 +
 analysis/webservice/webapp.py                      | 147 +-----
 analysis/webservice/webapp_livy.py                 |  12 +-
 analysis/webservice/webmodel.py                    | 520 ---------------------
 analysis/webservice/webmodel/CustomEncoder.py      |  49 ++
 analysis/webservice/webmodel/Exceptions.py         |  19 +
 analysis/webservice/webmodel/NexusRequestObject.py | 227 +++++++++
 .../webmodel/NexusRequestObjectTornadoFree.py      | 108 +++++
 analysis/webservice/webmodel/NexusResults.py       | 104 +++++
 analysis/webservice/webmodel/RequestParameters.py  |  23 +
 .../webservice/webmodel/StandardNexusErrors.py     |   4 +
 .../webservice/webmodel/StatsComputeOptions.py     |  66 +++
 analysis/webservice/webmodel/__init__.py           |  10 +
 analysis/webservice/webmodel/cached.py             |  38 ++
 .../{datastores.ini => datastores.ini.default}     |   1 +
 data-access/nexustiles/dao/CassandraProxy.py       |  10 +-
 data-access/nexustiles/nexustiles.py               |  20 +-
 data-access/requirements.txt                       |  10 +-
 data-access/setup.py                               |   2 +-
 docker/nexus-webapp/Dockerfile                     |   8 +-
 84 files changed, 1340 insertions(+), 1773 deletions(-)

diff --git a/.gitignore b/.gitignore
index ddfe3ca..3e29626 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,7 @@
 *.vscode
 *.code-workspace
 *.idea
-*.DS_Store
\ No newline at end of file
+*.DS_Store
+analysis/webservice/algorithms/doms/domsconfig.ini
+data-access/nexustiles/config/datastores.ini
+venv/
diff --git a/analysis/README.md b/analysis/README.md
index 3607682..a55841b 100644
--- a/analysis/README.md
+++ b/analysis/README.md
@@ -5,21 +5,62 @@ Python module that exposes NEXUS analytical capabilities via a HTTP webservice.
 
 # Developer Setup
 
-**NOTE** This project has a dependency on [data-access](https://github.jpl.nasa.gov/thuang/nexus/tree/master/data-access). Make sure data-access is installed in the same environment you will be using for this module.
+**NOTE** This project has a dependency on [nexusproto](https://github.com/apache/incubator-sdap-nexusproto). Make sure data-access is installed in the same environment you will be using for this module.
 
 1. Setup a separate conda env or activate an existing one
 
     ````
-    conda create --name nexus-analysis python
-    source activate nexus-analysis
+    conda create --name nexus-analysis python=2.7.17
+    conda activate nexus-analysis
     ````
 
 2. Install conda dependencies
 
     ````
-    conda install numpy matplotlib mpld3 scipy netCDF4 basemap gdal pyproj=1.9.5.1 libnetcdf=4.3.3.1
+    cd analysis
+    conda install pyspark
+    conda install -c conda-forge --file conda-requirements.txt
+    #conda install numpy matplotlib mpld3 scipy netCDF4 basemap gdal pyproj=1.9.5.1 libnetcdf=4.3.3.1
     ````
 
-3. Run `python setup.py install`
+3. Update the configuration for solr and cassandra
+
+Create .ini files from a copy of their counterpart .ini.default
+
+    analysis/webservice/algorithms/doms/domsconfig.ini
+    data-access/nexustiles/config/datastores.ini
+
+These files will override the default values.
+
+They are not saved on git and will not be overridden when you pull the code.
+
+BUT be careful to remove them when you build the docker image. Otherwise they will be embedded and used in the docker image.
+
+4. Install nexusproto
+
+5. Install data-access dependency:
+
+    ````
+    cd data-access
+    pip install cython
+    python setup.py install
+    ````
+
+6. Set environment variables (examples):
+
+    ```
+    PYTHONUNBUFFERED=1
+    PROJ_LIB=/opt/anaconda2/envs/nexus-analysis/lib/python2.7/site-packages/pyproj/data
+    JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_241.jdk/Contents/Home
+    ```
+
+7. Launch unit tests
+
+    pip install pytest
+    pytest
+
+
+8. Launch `python webservice/webapp.py` in command line or run it from the IDE.
+
+
 
-4. Launch `python webservice/webapp.py`
diff --git a/analysis/__init__.py b/analysis/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/analysis/conda-requirements.txt b/analysis/conda-requirements.txt
index aa9fafb..6d9a35e 100644
--- a/analysis/conda-requirements.txt
+++ b/analysis/conda-requirements.txt
@@ -1,15 +1,17 @@
-tornado
-singledispatch
-pytz
-utm
-Shapely
-mock
-backports.functools_lru_cache
-netCDF4
-boto3
-pyproj<1.9.5
-Pillow
-GDAL>=3.0.2
-basemap
-scipy
-mpld3
\ No newline at end of file
+netcdf4==1.5.1.2
+basemap==1.2.0
+scipy==1.2.1
+pytz==2020.1
+utm==0.5.0
+shapely==1.6.4
+backports.functools_lru_cache==1.6.1
+boto3==1.14.16
+pillow==5.4.1
+mpld3==0.5.1
+tornado==5.1.1
+pyproj==1.9.4
+geos==3.7.2
+gdal==3.0.2
+mock==2.0.0
+singledispatch==3.4.0.3
+
diff --git a/analysis/setup.py b/analysis/setup.py
index d6d6803..62a6891 100644
--- a/analysis/setup.py
+++ b/analysis/setup.py
@@ -35,17 +35,23 @@ setuptools.setup(
 
     description="NEXUS API.",
     long_description=open('README.md').read(),
-
-    packages=[
-        'webservice',
-        'webservice.algorithms',
-        'webservice.algorithms.doms',
-        'webservice.algorithms_spark',
-        'webservice.metrics'
-    ],
+    packages=setuptools.find_packages(),
+    #packages=[
+    #    'webservice',
+    #    'webservice.algorithms',
+    #    'webservice.algorithms.doms',
+    #    'webservice.algorithms_spark',
+    #    'webservice.metrics',
+    #    'webservice.webmodel',
+    #    'webservice.tornado_nexus',
+    #    'webservice.nexus_tornado',
+    #    'webservice.nexus_tornado.request',
+    #    'webservice.nexus_tornado.request.handlers',
+    #    'webservice.nexus_tornado.request.renderers'
+    #],
     package_data={
         'webservice': ['config/web.ini', 'config/algorithms.ini'],
-        'webservice.algorithms.doms': ['domsconfig.ini']
+        'webservice.algorithms.doms': ['domsconfig.ini.default']
     },
     data_files=[
         ('static', ['static/index.html'])
diff --git a/analysis/tests/algorithms/StandardDeviationSearch_test.py b/analysis/tests/algorithms/StandardDeviationSearch_test.py
index f0f740e..887f18e 100644
--- a/analysis/tests/algorithms/StandardDeviationSearch_test.py
+++ b/analysis/tests/algorithms/StandardDeviationSearch_test.py
@@ -17,7 +17,7 @@
 import json
 import unittest
 import urllib
-from multiprocessing.pool import ThreadPool
+#from multiprocessing.pool import ThreadPool
 from unittest import skip
 
 import numpy as np
@@ -25,25 +25,25 @@ from mock import Mock
 from nexustiles.model.nexusmodel import Tile, BBox
 from nexustiles.nexustiles import NexusTileService
 from tornado.testing import AsyncHTTPTestCase, bind_unused_port
-from tornado.web import Application
+import tornado.web
 
-from webservice.NexusHandler import AlgorithmModuleWrapper
 from webservice.algorithms import StandardDeviationSearch
-from webservice.webapp import ModularNexusHandlerWrapper
+from webservice.nexus_tornado.request.handlers.NexusRequestHandler import NexusRequestHandler
 
 
 class HttpParametersTest(AsyncHTTPTestCase):
     def get_app(self):
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path
-        algorithm = AlgorithmModuleWrapper(StandardDeviationSearch.StandardDeviationSearchHandlerImpl)
-        thread_pool = ThreadPool(processes=1)
-        return Application(
-            [(path, ModularNexusHandlerWrapper, dict(clazz=algorithm, algorithm_config=None, thread_pool=thread_pool))],
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path
+        algorithm = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl
+        thread_pool = tornado.concurrent.futures.ThreadPoolExecutor(1)
+        #thread_pool = ThreadPool(processes=1)
+        return tornado.web.Application(
+            [(path, NexusRequestHandler, dict(clazz=algorithm, thread_pool=thread_pool))],
             default_host=bind_unused_port()
         )
 
     def test_no_ds_400(self):
-        response = self.fetch(StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path)
+        response = self.fetch(StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
         self.assertEqual("'ds' argument is required", body['error'])
@@ -52,7 +52,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
         params = {
             "ds": "dataset"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
@@ -63,7 +63,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "ds": "dataset",
             "longitude": "22.4"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
@@ -75,7 +75,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "longitude": "22.4",
             "latitude": "-84.32"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
@@ -88,7 +88,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "latitude": "-84.32",
             "day": "yayday"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
@@ -102,7 +102,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "day": "35",
             "date": "1992-01-01T00:00:00Z"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(400, response.code)
         body = json.loads(response.body)
@@ -115,7 +115,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "latitude": "-84.32",
             "day": "35"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(200, response.code)
 
@@ -127,7 +127,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "date": "1992-01-01T00:00:00Z",
             "allInTile": "false"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(200, response.code)
 
@@ -139,7 +139,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "latitude": "-78.225",
             "day": "1"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         response = self.fetch(path)
         self.assertEqual(200, response.code)
         print response.body
@@ -155,7 +155,7 @@ class HttpParametersTest(AsyncHTTPTestCase):
             "date": "2016-01-01T00:00:00Z",
             "allInTile": "false"
         }
-        path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+        path = StandardDeviationSearch.StandardDeviationSearchCalcHandlerImpl.path + '?' + urllib.urlencode(params)
         # Increase timeouts when debugging
         # self.http_client.fetch(self.get_url(path), self.stop, connect_timeout=99999999, request_timeout=999999999)
         # response = self.wait(timeout=9999999999)
diff --git a/analysis/tests/algorithms/longitudelatitudemap_test.py b/analysis/tests/algorithms/longitudelatitudemap_test.py
index 3c2475b..0e0e081 100644
--- a/analysis/tests/algorithms/longitudelatitudemap_test.py
+++ b/analysis/tests/algorithms/longitudelatitudemap_test.py
@@ -18,14 +18,14 @@ import time
 import unittest
 from multiprocessing.pool import ThreadPool
 
-from webservice.NexusHandler import AlgorithmModuleWrapper
 from mock import MagicMock
 from nexustiles.nexustiles import NexusTileService
 from shapely.geometry import box
 from tornado.testing import bind_unused_port
 from tornado.web import Application
-from webservice.webapp import ModularNexusHandlerWrapper
+#from webapp import ModularNexusHandlerWrapper
 from webservice.webmodel import NexusRequestObject
+from webservice.nexus_tornado.request.handlers import NexusRequestHandler
 
 from webservice.algorithms import LongitudeLatitudeMap
 
@@ -54,18 +54,18 @@ class TestLongitudeLatitudeMap(unittest.TestCase):
         boxes = self.tile_service.get_distinct_bounding_boxes_in_polygon(bounding, ds,
                                                                          start_seconds_from_epoch,
                                                                          end_seconds_from_epoch)
-        print LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl.results_to_dicts(
+        print LongitudeLatitudeMap.LongitudeLatitudeMapCalcHandlerImpl.results_to_dicts(
             LongitudeLatitudeMap.lat_lon_map_driver(bounding, start_seconds_from_epoch, end_seconds_from_epoch, ds,
                                                     [a_box.bounds for a_box in boxes]))
 
 
 class HttpIntegrationTest(unittest.TestCase):
     def get_app(self):
-        path = LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl.path
-        algorithm = AlgorithmModuleWrapper(LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl)
+        path = LongitudeLatitudeMap.LongitudeLatitudeMapCalcHandlerImpl.path
+        algorithm = LongitudeLatitudeMap.LongitudeLatitudeMapCalcHandlerImpl
         thread_pool = ThreadPool(processes=1)
         return Application(
-            [(path, ModularNexusHandlerWrapper, dict(clazz=algorithm, algorithm_config=None, thread_pool=thread_pool))],
+            [(path, NexusRequestHandler, dict(clazz=algorithm, algorithm_config=None, thread_pool=thread_pool))],
             default_host=bind_unused_port()
         )
 
@@ -86,7 +86,7 @@ class HttpIntegrationTest(unittest.TestCase):
         request_handler_mock = MagicMock()
         request_handler_mock.get_argument.side_effect = get_argument
         request = NexusRequestObject(request_handler_mock)
-        handler_impl = LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl()
+        handler_impl = LongitudeLatitudeMap.LongitudeLatitudeMapCalcHandlerImpl()
 
         response = handler_impl.calc(request)
 
diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/NexusHandler.py
index 1fc035d..42972ec 100644
--- a/analysis/webservice/NexusHandler.py
+++ b/analysis/webservice/NexusHandler.py
@@ -15,16 +15,8 @@
 
 
 import logging
-import time
 import types
 
-import numpy as np
-from netCDF4 import Dataset
-from nexustiles.nexustiles import NexusTileService
-from webservice.metrics.MetricsField import NumberMetricsField, SparkAccumulatorMetricsField
-from webservice.metrics.MetricsRecord import MetricsRecord
-from webservice.webmodel import NexusProcessingException
-
 AVAILABLE_HANDLERS = []
 AVAILABLE_INITIALIZERS = []
 
@@ -43,9 +35,9 @@ def nexus_initializer(clazz):
 def nexus_handler(clazz):
     log = logging.getLogger(__name__)
     try:
-        wrapper = AlgorithmModuleWrapper(clazz)
-        log.info("Adding algorithm module '%s' with path '%s' (%s)" % (wrapper.name(), wrapper.path(), wrapper.clazz()))
-        AVAILABLE_HANDLERS.append(wrapper)
+        clazz.validate()
+        log.info("Adding algorithm module '%s' with path '%s' (%s)" % (clazz.name, clazz.path, clazz))
+        AVAILABLE_HANDLERS.append(clazz)
     except Exception as ex:
         log.warn("Handler '%s' is invalid and will be skipped (reason: %s)" % (clazz, ex.message), exc_info=True)
     return clazz
@@ -126,496 +118,5 @@ class NexusInitializerWrapper:
             self.log("Initializer '%s' has already been run" % self.__clazz)
 
 
-class AlgorithmModuleWrapper:
-    def __init__(self, clazz):
-        self.__instance = None
-        self.__clazz = clazz
-        self.validate()
-
-    def validate(self):
-        if "calc" not in self.__clazz.__dict__ or not type(self.__clazz.__dict__["calc"]) == types.FunctionType:
-            raise Exception("Method 'calc' has not been declared")
-
-        if "path" not in self.__clazz.__dict__:
-            raise Exception("Property 'path' has not been defined")
-
-        if "name" not in self.__clazz.__dict__:
-            raise Exception("Property 'name' has not been defined")
-
-        if "description" not in self.__clazz.__dict__:
-            raise Exception("Property 'description' has not been defined")
-
-        if "params" not in self.__clazz.__dict__:
-            raise Exception("Property 'params' has not been defined")
-
-    def clazz(self):
-        return self.__clazz
-
-    def name(self):
-        return self.__clazz.name
-
-    def path(self):
-        return self.__clazz.path
-
-    def description(self):
-        return self.__clazz.description
-
-    def params(self):
-        return self.__clazz.params
-
-    def instance(self, algorithm_config=None, sc=None):
-        if "singleton" in self.__clazz.__dict__ and self.__clazz.__dict__["singleton"] is True:
-            if self.__instance is None:
-                self.__instance = self.__clazz()
-
-                try:
-                    self.__instance.set_config(algorithm_config)
-                except AttributeError:
-                    pass
-
-                try:
-                    self.__instance.set_spark_context(sc)
-                except AttributeError:
-                    pass
-
-            return self.__instance
-        else:
-            instance = self.__clazz()
-
-            try:
-                instance.set_config(algorithm_config)
-            except AttributeError:
-                pass
-
-            try:
-                self.__instance.set_spark_context(sc)
-            except AttributeError:
-                pass
-            return instance
-
-    def isValid(self):
-        try:
-            self.validate()
-            return True
-        except Exception as ex:
-            return False
-
-
-class CalcHandler(object):
-    def calc(self, computeOptions, **args):
-        raise Exception("calc() not yet implemented")
-
-
-class NexusHandler(CalcHandler):
-    def __init__(self, skipCassandra=False, skipSolr=False):
-        CalcHandler.__init__(self)
-
-        self.algorithm_config = None
-        self._skipCassandra = skipCassandra
-        self._skipSolr = skipSolr
-        self.__tile_service = None  # instantiate the tile service after config is fully loaded
-
-    def set_config(self, algorithm_config):
-        self.algorithm_config = algorithm_config
-
-    def _get_tile_service(self):
-        if self.__tile_service is None:
-            self.__tile_service = NexusTileService(skipDatastore=self._skipCassandra,
-                                                   skipMetadatastore=self._skipSolr,
-                                                   config=self.algorithm_config)
-        return self.__tile_service
-
-
-    def _mergeDicts(self, x, y):
-        z = x.copy()
-        z.update(y)
-        return z
-
-    def _now(self):
-        millis = int(round(time.time() * 1000))
-        return millis
-
-    def _mergeDataSeries(self, resultsData, dataNum, resultsMap):
-
-        for entry in resultsData:
-
-            # frmtdTime = datetime.fromtimestamp(entry["time"] ).strftime("%Y-%m")
-            frmtdTime = entry["time"]
-
-            if not frmtdTime in resultsMap:
-                resultsMap[frmtdTime] = []
-            entry["ds"] = dataNum
-            resultsMap[frmtdTime].append(entry)
-
-    def _resultsMapToList(self, resultsMap):
-        resultsList = []
-        for key, value in resultsMap.iteritems():
-            resultsList.append(value)
-
-        resultsList = sorted(resultsList, key=lambda entry: entry[0]["time"])
-        return resultsList
-
-    def _mergeResults(self, resultsRaw):
-        resultsMap = {}
-
-        for i in range(0, len(resultsRaw)):
-            resultsSeries = resultsRaw[i]
-            resultsData = resultsSeries[0]
-            self._mergeDataSeries(resultsData, i, resultsMap)
-
-        resultsList = self._resultsMapToList(resultsMap)
-        return resultsList
-
-
-class SparkHandler(NexusHandler):
-    class SparkJobContext(object):
-
-        class MaxConcurrentJobsReached(Exception):
-            def __init__(self, *args, **kwargs):
-                Exception.__init__(self, *args, **kwargs)
-
-        def __init__(self, job_stack):
-            self.spark_job_stack = job_stack
-            self.job_name = None
-            self.log = logging.getLogger(__name__)
-
-        def __enter__(self):
-            try:
-                self.job_name = self.spark_job_stack.pop()
-                self.log.debug("Using %s" % self.job_name)
-            except IndexError:
-                raise SparkHandler.SparkJobContext.MaxConcurrentJobsReached()
-            return self
-
-        def __exit__(self, exc_type, exc_val, exc_tb):
-            if self.job_name is not None:
-                self.log.debug("Returning %s" % self.job_name)
-                self.spark_job_stack.append(self.job_name)
-
-    def __init__(self, **kwargs):
-        import inspect
-        NexusHandler.__init__(self, **kwargs)
-        self._sc = None
-
-        self.spark_job_stack = []
-
-        def with_spark_job_context(calc_func):
-            from functools import wraps
-
-            @wraps(calc_func)
-            def wrapped(*args, **kwargs1):
-                try:
-                    with SparkHandler.SparkJobContext(self.spark_job_stack) as job_context:
-                        # TODO Pool and Job are forced to a 1-to-1 relationship
-                        calc_func.im_self._sc.setLocalProperty("spark.scheduler.pool", job_context.job_name)
-                        calc_func.im_self._sc.setJobGroup(job_context.job_name, "a spark job")
-                        return calc_func(*args, **kwargs1)
-                except SparkHandler.SparkJobContext.MaxConcurrentJobsReached:
-                    raise NexusProcessingException(code=503,
-                                                   reason="Max concurrent requests reached. Please try again later.")
-
-            return wrapped
-
-        for member in inspect.getmembers(self, predicate=inspect.ismethod):
-            if member[0] == "calc":
-                setattr(self, member[0], with_spark_job_context(member[1]))
-
-    def set_spark_context(self, sc):
-        self._sc = sc
-
-    def set_config(self, algorithm_config):
-        max_concurrent_jobs = algorithm_config.getint("spark", "maxconcurrentjobs") if algorithm_config.has_section(
-            "spark") and algorithm_config.has_option("spark", "maxconcurrentjobs") else 10
-        self.spark_job_stack = list(["Job %s" % x for x in xrange(1, max_concurrent_jobs + 1)])
-        self.algorithm_config = algorithm_config
-
-    def _setQueryParams(self, ds, bounds, start_time=None, end_time=None,
-                        start_year=None, end_year=None, clim_month=None,
-                        fill=-9999.):
-        self._ds = ds
-        self._minLat, self._maxLat, self._minLon, self._maxLon = bounds
-        self._startTime = start_time
-        self._endTime = end_time
-        self._startYear = start_year
-        self._endYear = end_year
-        self._climMonth = clim_month
-        self._fill = fill
-
-    def _set_info_from_tile_set(self, nexus_tiles):
-        ntiles = len(nexus_tiles)
-        self.log.debug('Attempting to extract info from {0} tiles'. \
-                       format(ntiles))
-        status = False
-        self._latRes = None
-        self._lonRes = None
-        for tile in nexus_tiles:
-            self.log.debug('tile coords:')
-            self.log.debug('tile lats: {0}'.format(tile.latitudes))
-            self.log.debug('tile lons: {0}'.format(tile.longitudes))
-            if self._latRes is None:
-                lats = tile.latitudes.data
-                if (len(lats) > 1):
-                    self._latRes = abs(lats[1] - lats[0])
-            if self._lonRes is None:
-                lons = tile.longitudes.data
-                if (len(lons) > 1):
-                    self._lonRes = abs(lons[1] - lons[0])
-            if ((self._latRes is not None) and
-                    (self._lonRes is not None)):
-                lats_agg = np.concatenate([tile.latitudes.compressed()
-                                           for tile in nexus_tiles])
-                lons_agg = np.concatenate([tile.longitudes.compressed()
-                                           for tile in nexus_tiles])
-                self._minLatCent = np.min(lats_agg)
-                self._maxLatCent = np.max(lats_agg)
-                self._minLonCent = np.min(lons_agg)
-                self._maxLonCent = np.max(lons_agg)
-                self._nlats = int((self._maxLatCent - self._minLatCent) /
-                                  self._latRes + 0.5) + 1
-                self._nlons = int((self._maxLonCent - self._minLonCent) /
-                                  self._lonRes + 0.5) + 1
-                status = True
-                break
-        return status
-
-    def _find_global_tile_set(self, metrics_callback=None):
-        # This only works for a single dataset.  If more than one is provided,
-        # we use the first one and ignore the rest.
-        if type(self._ds) in (list, tuple):
-            ds = self._ds[0]
-        else:
-            ds = self._ds
-
-        # See what time stamps are in the specified range.
-        t_in_range = self._get_tile_service().find_days_in_range_asc(self._minLat,
-                                                               self._maxLat,
-                                                               self._minLon,
-                                                               self._maxLon,
-                                                               ds,
-                                                               self._startTime,
-                                                               self._endTime,
-                                                               metrics_callback=metrics_callback)
-
-        # Empty tile set will be returned upon failure to find the global
-        # tile set.
-        nexus_tiles = []
-
-        # Check one time stamp at a time and attempt to extract the global
-        # tile set.
-        for t in t_in_range:
-            nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
-                                                                      self._maxLon, ds=ds, start_time=t, end_time=t,
-                                                                      metrics_callback=metrics_callback)
-            if self._set_info_from_tile_set(nexus_tiles):
-                # Successfully retrieved global tile set from nexus_tiles,
-                # so no need to check any other time stamps.
-                break
-        return nexus_tiles
-
-    def _find_tile_bounds(self, t):
-        lats = t.latitudes
-        lons = t.longitudes
-        if (len(lats.compressed()) > 0) and (len(lons.compressed()) > 0):
-            min_lat = np.ma.min(lats)
-            max_lat = np.ma.max(lats)
-            min_lon = np.ma.min(lons)
-            max_lon = np.ma.max(lons)
-            good_inds_lat = np.where(lats.mask == False)[0]
-            good_inds_lon = np.where(lons.mask == False)[0]
-            min_y = np.min(good_inds_lat)
-            max_y = np.max(good_inds_lat)
-            min_x = np.min(good_inds_lon)
-            max_x = np.max(good_inds_lon)
-            bounds = (min_lat, max_lat, min_lon, max_lon,
-                      min_y, max_y, min_x, max_x)
-        else:
-            self.log.warn('Nothing in this tile!')
-            bounds = None
-        return bounds
-
-    @staticmethod
-    def query_by_parts(tile_service, min_lat, max_lat, min_lon, max_lon,
-                       dataset, start_time, end_time, part_dim=0):
-        nexus_max_tiles_per_query = 100
-        # print 'trying query: ',min_lat, max_lat, min_lon, max_lon, \
-        #    dataset, start_time, end_time
-        try:
-            tiles = \
-                tile_service.find_tiles_in_box(min_lat, max_lat,
-                                               min_lon, max_lon,
-                                               dataset,
-                                               start_time=start_time,
-                                               end_time=end_time,
-                                               fetch_data=False)
-            assert (len(tiles) <= nexus_max_tiles_per_query)
-        except:
-            # print 'failed query: ',min_lat, max_lat, min_lon, max_lon, \
-            #    dataset, start_time, end_time
-            if part_dim == 0:
-                # Partition by latitude.
-                mid_lat = (min_lat + max_lat) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, mid_lat,
-                                                          min_lon, max_lon,
-                                                          dataset,
-                                                          start_time, end_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               mid_lat,
-                                                               max_lat,
-                                                               min_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               start_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
-            elif part_dim == 1:
-                # Partition by longitude.
-                mid_lon = (min_lon + max_lon) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, max_lat,
-                                                          min_lon, mid_lon,
-                                                          dataset,
-                                                          start_time, end_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               min_lat,
-                                                               max_lat,
-                                                               mid_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               start_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
-            elif part_dim == 2:
-                # Partition by time.
-                mid_time = (start_time + end_time) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, max_lat,
-                                                          min_lon, max_lon,
-                                                          dataset,
-                                                          start_time, mid_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               min_lat,
-                                                               max_lat,
-                                                               min_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               mid_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
-        else:
-            # No exception, so query Cassandra for the tile data.
-            # print 'Making NEXUS query to Cassandra for %d tiles...' % \
-            #    len(tiles)
-            # t1 = time.time()
-            # print 'NEXUS call start at time %f' % t1
-            # sys.stdout.flush()
-            nexus_tiles = list(tile_service.fetch_data_for_tiles(*tiles))
-            nexus_tiles = list(tile_service.mask_tiles_to_bbox(min_lat, max_lat,
-                                                               min_lon, max_lon,
-                                                               nexus_tiles))
-            # t2 = time.time()
-            # print 'NEXUS call end at time %f' % t2
-            # print 'Seconds in NEXUS call: ', t2-t1
-            # sys.stdout.flush()
-
-        # print 'Returning %d tiles' % len(nexus_tiles)
-        return nexus_tiles
-
-    @staticmethod
-    def _prune_tiles(nexus_tiles):
-        del_ind = np.where([np.all(tile.data.mask) for tile in nexus_tiles])[0]
-        for i in np.flipud(del_ind):
-            del nexus_tiles[i]
-
-    def _lat2ind(self, lat):
-        return int((lat - self._minLatCent) / self._latRes + 0.5)
-
-    def _lon2ind(self, lon):
-        return int((lon - self._minLonCent) / self._lonRes + 0.5)
-
-    def _ind2lat(self, y):
-        return self._minLatCent + y * self._latRes
-
-    def _ind2lon(self, x):
-        return self._minLonCent + x * self._lonRes
-
-    def _create_nc_file_time1d(self, a, fname, varname, varunits=None,
-                               fill=None):
-        self.log.debug('a={0}'.format(a))
-        self.log.debug('shape a = {0}'.format(a.shape))
-        assert len(a.shape) == 1
-        time_dim = len(a)
-        rootgrp = Dataset(fname, "w", format="NETCDF4")
-        rootgrp.createDimension("time", time_dim)
-        vals = rootgrp.createVariable(varname, "f4", dimensions=("time",),
-                                      fill_value=fill)
-        times = rootgrp.createVariable("time", "f4", dimensions=("time",))
-        vals[:] = [d['mean'] for d in a]
-        times[:] = [d['time'] for d in a]
-        if varunits is not None:
-            vals.units = varunits
-        times.units = 'seconds since 1970-01-01 00:00:00'
-        rootgrp.close()
-
-    def _create_nc_file_latlon2d(self, a, fname, varname, varunits=None,
-                                 fill=None):
-        self.log.debug('a={0}'.format(a))
-        self.log.debug('shape a = {0}'.format(a.shape))
-        assert len(a.shape) == 2
-        lat_dim, lon_dim = a.shape
-        rootgrp = Dataset(fname, "w", format="NETCDF4")
-        rootgrp.createDimension("lat", lat_dim)
-        rootgrp.createDimension("lon", lon_dim)
-        vals = rootgrp.createVariable(varname, "f4",
-                                      dimensions=("lat", "lon",),
-                                      fill_value=fill)
-        lats = rootgrp.createVariable("lat", "f4", dimensions=("lat",))
-        lons = rootgrp.createVariable("lon", "f4", dimensions=("lon",))
-        vals[:, :] = a
-        lats[:] = np.linspace(self._minLatCent,
-                              self._maxLatCent, lat_dim)
-        lons[:] = np.linspace(self._minLonCent,
-                              self._maxLonCent, lon_dim)
-        if varunits is not None:
-            vals.units = varunits
-        lats.units = "degrees north"
-        lons.units = "degrees east"
-        rootgrp.close()
-
-    def _create_nc_file(self, a, fname, varname, **kwargs):
-        self._create_nc_file_latlon2d(a, fname, varname, **kwargs)
-
-    def _spark_nparts(self, nparts_requested):
-        max_parallelism = 128
-        num_partitions = min(nparts_requested if nparts_requested > 0
-                             else self._sc.defaultParallelism,
-                             max_parallelism)
-        return num_partitions
-
-    def _create_metrics_record(self):
-        return MetricsRecord([
-            SparkAccumulatorMetricsField(key='num_tiles',
-                                         description='Number of tiles fetched',
-                                         accumulator=self._sc.accumulator(0)),
-            SparkAccumulatorMetricsField(key='partitions',
-                                         description='Number of Spark partitions',
-                                         accumulator=self._sc.accumulator(0)),
-            SparkAccumulatorMetricsField(key='cassandra',
-                                         description='Cumulative time to fetch data from Cassandra',
-                                         accumulator=self._sc.accumulator(0)),
-            SparkAccumulatorMetricsField(key='solr',
-                                         description='Cumulative time to fetch data from Solr',
-                                         accumulator=self._sc.accumulator(0)),
-            SparkAccumulatorMetricsField(key='calculation',
-                                         description='Cumulative time to do calculations',
-                                         accumulator=self._sc.accumulator(0)),
-            NumberMetricsField(key='reduce', description='Actual time to reduce results'),
-            NumberMetricsField(key="actual_time", description="Total (actual) time")
-        ])
-
-
 def executeInitializers(config):
     [wrapper.init(config) for wrapper in AVAILABLE_INITIALIZERS]
diff --git a/analysis/webservice/WorkflowDriver.py b/analysis/webservice/WorkflowDriver.py
index de6c0e9..0a530cb 100644
--- a/analysis/webservice/WorkflowDriver.py
+++ b/analysis/webservice/WorkflowDriver.py
@@ -15,7 +15,7 @@
 
 import argparse
 
-from algorithms.MapFetchHandler import MapFetchHandler
+from algorithms.MapFetchHandler import MapFetchCalcHandler
 
 
 def start(args):
@@ -30,7 +30,7 @@ def start(args):
     interp = args.i
     time_interval = args.t
 
-    map = MapFetchHandler()
+    map = MapFetchCalcHandler()
     map.generate(dataset_shortname, granule_name, prefix, ct, interp, _min, _max, width, height, time_interval)
 
 
diff --git a/analysis/webservice/__init__.py b/analysis/webservice/__init__.py
index 8afd240..6acb5d1 100644
--- a/analysis/webservice/__init__.py
+++ b/analysis/webservice/__init__.py
@@ -12,4 +12,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/analysis/webservice/algorithms/Capabilities.py b/analysis/webservice/algorithms/Capabilities.py
index c3931e2..f507587 100644
--- a/analysis/webservice/algorithms/Capabilities.py
+++ b/analysis/webservice/algorithms/Capabilities.py
@@ -16,12 +16,13 @@
 
 import json
 
-from webservice.NexusHandler import CalcHandler, nexus_handler, AVAILABLE_HANDLERS
+from webservice.NexusHandler import nexus_handler, AVAILABLE_HANDLERS
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults
 
 
 @nexus_handler
-class CapabilitiesListHandlerImpl(CalcHandler):
+class CapabilitiesListCalcHandlerImpl(NexusCalcHandler):
     name = "Capabilities"
     path = "/capabilities"
     description = "Lists the current capabilities of this Nexus system"
@@ -29,17 +30,17 @@ class CapabilitiesListHandlerImpl(CalcHandler):
     singleton = True
 
     def __init__(self):
-        CalcHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         capabilities = []
 
         for capability in AVAILABLE_HANDLERS:
             capabilityDef = {
-                "name": capability.name(),
-                "path": capability.path(),
-                "description": capability.description(),
-                "parameters": capability.params()
+                "name": capability.name,
+                "path": capability.path,
+                "description": capability.description,
+                "parameters": capability.params
             }
             capabilities.append(capabilityDef)
 
diff --git a/analysis/webservice/algorithms/ColorBarHandler.py b/analysis/webservice/algorithms/ColorBarHandler.py
index 52429c6..e2bf491 100644
--- a/analysis/webservice/algorithms/ColorBarHandler.py
+++ b/analysis/webservice/algorithms/ColorBarHandler.py
@@ -20,12 +20,12 @@ import time
 import numpy as np
 
 import colortables
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 
 
 @nexus_handler
-class ColorBarHandler(BaseHandler):
+class ColorBarCalcHandler(BaseHandler):
     name = "ColorBarHandler"
     path = "/colorbar"
     description = "Creates a CMC colorbar spec for a dataset"
diff --git a/analysis/webservice/algorithms/CorrelationMap.py b/analysis/webservice/algorithms/CorrelationMap.py
index 7f864a5..1726412 100644
--- a/analysis/webservice/algorithms/CorrelationMap.py
+++ b/analysis/webservice/algorithms/CorrelationMap.py
@@ -23,12 +23,13 @@ from nexustiles.model.nexusmodel import get_approximate_value_for_lat_lon
 from scipy.stats import linregress
 from shapely.geometry import box
 
-from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusProcessingException, NexusResults
 
 
 @nexus_handler
-class LongitudeLatitudeMapHandlerImpl(NexusHandler):
+class LongitudeLatitudeMapCalcHandlerImpl(NexusCalcHandler):
     name = "Correlation Map"
     path = "/correlationMap"
     description = "Computes a correlation map between two datasets given an arbitrary geographical area and time range"
@@ -41,7 +42,7 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         minLat = computeOptions.get_min_lat()
diff --git a/analysis/webservice/algorithms/DailyDifferenceAverage.py b/analysis/webservice/algorithms/DailyDifferenceAverage.py
index 8861d09..1b4d642 100644
--- a/analysis/webservice/algorithms/DailyDifferenceAverage.py
+++ b/analysis/webservice/algorithms/DailyDifferenceAverage.py
@@ -24,14 +24,15 @@ import pytz
 from nexustiles.nexustiles import NexusTileService, NexusTileServiceException
 from shapely.geometry import box
 
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NexusProcessingException
 
 SENTINEL = 'STOP'
 
 
 @nexus_handler
-class DailyDifferenceAverageImpl(NexusHandler):
+class DailyDifferenceAverageImpl(NexusCalcHandler):
     name = "Daily Difference Average"
     path = "/dailydifferenceaverage"
     description = "Subtracts data in box in Dataset 1 from Dataset 2, then averages the difference per day."
@@ -80,7 +81,7 @@ class DailyDifferenceAverageImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
 
     def calc(self, request, **args):
         min_lat, max_lat, min_lon, max_lon = request.get_min_lat(), request.get_max_lat(), request.get_min_lon(), request.get_max_lon()
diff --git a/analysis/webservice/algorithms/DataInBoundsSearch.py b/analysis/webservice/algorithms/DataInBoundsSearch.py
index 7c426c7..2da6891 100644
--- a/analysis/webservice/algorithms/DataInBoundsSearch.py
+++ b/analysis/webservice/algorithms/DataInBoundsSearch.py
@@ -19,7 +19,8 @@ from datetime import datetime
 
 from pytz import timezone
 
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NexusProcessingException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -27,7 +28,7 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class DataInBoundsSearchHandlerImpl(NexusHandler):
+class DataInBoundsSearchCalcHandlerImpl(NexusCalcHandler):
     name = "Data In-Bounds Search"
     path = "/datainbounds"
     description = "Fetches point values for a given dataset and geographical area"
@@ -67,7 +68,7 @@ class DataInBoundsSearchHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
diff --git a/analysis/webservice/algorithms/DataSeriesList.py b/analysis/webservice/algorithms/DataSeriesList.py
index cba5590..16736b2 100644
--- a/analysis/webservice/algorithms/DataSeriesList.py
+++ b/analysis/webservice/algorithms/DataSeriesList.py
@@ -16,20 +16,20 @@
 
 import json
 
-from webservice.NexusHandler import NexusHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.NexusHandler import nexus_handler
 from webservice.webmodel import cached
 
 
 @nexus_handler
-class DataSeriesListHandlerImpl(NexusHandler):
+class DataSeriesListCalcHandlerImpl(NexusCalcHandler):
     name = "Dataset List"
     path = "/list"
     description = "Lists datasets currently available for analysis"
     params = {}
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
 
     @cached(ttl=(60 * 60 * 1000))  # 1 hour cached
     def calc(self, computeOptions, **args):
diff --git a/analysis/webservice/algorithms/DelayTest.py b/analysis/webservice/algorithms/DelayTest.py
index 7459f93..e2c1b30 100644
--- a/analysis/webservice/algorithms/DelayTest.py
+++ b/analysis/webservice/algorithms/DelayTest.py
@@ -16,12 +16,12 @@
 
 import time
 
-from webservice.NexusHandler import CalcHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.NexusHandler import nexus_handler
 
 
 @nexus_handler
-class DelayHandlerImpl(CalcHandler):
+class DelayCalcHandlerImpl(NexusCalcHandler):
     name = "Delay"
     path = "/delay"
     description = "Waits a little while"
@@ -29,7 +29,7 @@ class DelayHandlerImpl(CalcHandler):
     singleton = True
 
     def __init__(self):
-        CalcHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         time.sleep(10)
diff --git a/analysis/webservice/algorithms/ErrorTosserTest.py b/analysis/webservice/algorithms/ErrorTosserTest.py
index 2845a92..dc4d617 100644
--- a/analysis/webservice/algorithms/ErrorTosserTest.py
+++ b/analysis/webservice/algorithms/ErrorTosserTest.py
@@ -14,11 +14,12 @@
 # limitations under the License.
 
 
-from webservice.NexusHandler import CalcHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 
 
 @nexus_handler
-class ErrorTosserHandler(CalcHandler):
+class ErrorTosserCalcHandler(NexusCalcHandler):
     name = "MakeError"
     path = "/makeerror"
     description = "Causes an error"
@@ -26,7 +27,7 @@ class ErrorTosserHandler(CalcHandler):
     singleton = True
 
     def __init__(self):
-        CalcHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         a = 100 / 0.0
diff --git a/analysis/webservice/algorithms/Heartbeat.py b/analysis/webservice/algorithms/Heartbeat.py
index fd69bfd..ae7fcee 100644
--- a/analysis/webservice/algorithms/Heartbeat.py
+++ b/analysis/webservice/algorithms/Heartbeat.py
@@ -16,11 +16,12 @@
 
 import json
 
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 
 
 @nexus_handler
-class HeartbeatHandlerImpl(NexusHandler):
+class HeartbeatCalcHandlerImpl(NexusCalcHandler):
     name = "Backend Services Status"
     path = "/heartbeat"
     description = "Returns health status of Nexus backend services"
@@ -28,7 +29,7 @@ class HeartbeatHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
 
     def calc(self, computeOptions, **args):
         solrOnline = self._get_tile_service().pingSolr()
diff --git a/analysis/webservice/algorithms/HofMoeller.py b/analysis/webservice/algorithms/HofMoeller.py
index 929e03c..563ea3d 100644
--- a/analysis/webservice/algorithms/HofMoeller.py
+++ b/analysis/webservice/algorithms/HofMoeller.py
@@ -21,19 +21,23 @@ from cStringIO import StringIO
 from datetime import datetime
 from multiprocessing.dummy import Pool, Manager
 
+import matplotlib
 import matplotlib.pyplot as plt
 import mpld3
 import numpy as np
 from matplotlib import cm
 from matplotlib.ticker import FuncFormatter
 
-from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusProcessingException, NexusResults
 
 SENTINEL = 'STOP'
 LATITUDE = 0
 LONGITUDE = 1
 
+if not matplotlib.get_backend():
+    matplotlib.use('Agg')
 
 class LongitudeHofMoellerCalculator(object):
     def longitude_time_hofmoeller_stats(self, tile, index):
@@ -88,9 +92,9 @@ class LatitudeHofMoellerCalculator(object):
         return stat
 
 
-class BaseHoffMoellerHandlerImpl(NexusHandler):
+class BaseHoffMoellerCalcHandlerImpl(NexusCalcHandler):
     def __init__(self):
-        NexusHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def applyDeseasonToHofMoellerByField(self, results, pivot="lats", field="avg", append=True):
@@ -117,7 +121,7 @@ class BaseHoffMoellerHandlerImpl(NexusHandler):
 
 
 @nexus_handler
-class LatitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
+class LatitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerCalcHandlerImpl):
     name = "Latitude/Time HofMoeller"
     path = "/latitudeTimeHofMoeller"
     description = "Computes a latitude/time HofMoeller plot given an arbitrary geographical area and time range"
@@ -125,7 +129,7 @@ class LatitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
     singleton = True
 
     def __init__(self):
-        BaseHoffMoellerHandlerImpl.__init__(self)
+        BaseHoffMoellerCalcHandlerImpl.__init__(self)
 
     def calc(self, computeOptions, **args):
         tiles = self._get_tile_service().get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
@@ -183,7 +187,7 @@ class LatitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
 
 
 @nexus_handler
-class LongitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
+class LongitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerCalcHandlerImpl):
     name = "Longitude/Time HofMoeller"
     path = "/longitudeTimeHofMoeller"
     description = "Computes a longitude/time HofMoeller plot given an arbitrary geographical area and time range"
@@ -191,7 +195,7 @@ class LongitudeTimeHoffMoellerHandlerImpl(BaseHoffMoellerHandlerImpl):
     singleton = True
 
     def __init__(self):
-        BaseHoffMoellerHandlerImpl.__init__(self)
+        BaseHoffMoellerCalcHandlerImpl.__init__(self)
 
     def calc(self, computeOptions, **args):
         tiles = self._get_tile_service().get_tiles_bounded_by_box(computeOptions.get_min_lat(), computeOptions.get_max_lat(),
diff --git a/analysis/webservice/algorithms/LongitudeLatitudeMap.py b/analysis/webservice/algorithms/LongitudeLatitudeMap.py
index 3277683..3f0467a 100644
--- a/analysis/webservice/algorithms/LongitudeLatitudeMap.py
+++ b/analysis/webservice/algorithms/LongitudeLatitudeMap.py
@@ -21,7 +21,8 @@ from datetime import datetime
 from pytz import timezone
 from shapely.geometry import box
 
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NexusProcessingException
 
 SENTINEL = 'STOP'
@@ -30,7 +31,7 @@ tile_service = None
 
 
 @nexus_handler
-class LongitudeLatitudeMapHandlerImpl(NexusHandler):
+class LongitudeLatitudeMapCalcHandlerImpl(NexusCalcHandler):
     name = "Longitude/Latitude Time Average Map"
     path = "/longitudeLatitudeMap"
     description = "Computes a Latitude/Longitude Time Average plot given an arbitrary geographical area and time range"
@@ -74,7 +75,7 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
@@ -131,7 +132,7 @@ class LongitudeLatitudeMapHandlerImpl(NexusHandler):
             "endTime": datetime.utcfromtimestamp(end_seconds_from_epoch).strftime('%Y-%m-%dT%H:%M:%SZ')
         }
         return LongitudeLatitudeMapResults(
-            results=LongitudeLatitudeMapHandlerImpl.results_to_dicts(point_avg_over_time), meta=None,
+            results=LongitudeLatitudeMapCalcHandlerImpl.results_to_dicts(point_avg_over_time), meta=None,
             **kwargs)
 
     @staticmethod
diff --git a/analysis/webservice/algorithms/MapFetchHandler.py b/analysis/webservice/algorithms/MapFetchHandler.py
index 9c6be94..5fe3eec 100644
--- a/analysis/webservice/algorithms/MapFetchHandler.py
+++ b/analysis/webservice/algorithms/MapFetchHandler.py
@@ -31,12 +31,12 @@ from dateutil.relativedelta import *
 
 import colortables
 import webservice.GenerateImageMRF as MRF
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 
 
 @nexus_handler
-class MapFetchHandler(BaseHandler):
+class MapFetchCalcHandler(BaseHandler):
     name = "MapFetchHandler"
     path = "/map"
     description = "Creates a map image"
@@ -114,7 +114,7 @@ class MapFetchHandler(BaseHandler):
                     value = np.max((min, value))
                     value = np.min((max, value))
                     value255 = int(round((value - min) / (max - min) * 255.0))
-                    rgba = MapFetchHandler.__get_color(value255, table)
+                    rgba = MapFetchCalcHandler.__get_color(value255, table)
                     img_data.putpixel((pixel_x, pixel_y), (rgba[0], rgba[1], rgba[2], 255))
 
     @staticmethod
@@ -148,7 +148,7 @@ class MapFetchHandler(BaseHandler):
         data_min = stats["minValue"] if np.isnan(force_min) else force_min
         data_max = stats["maxValue"] if np.isnan(force_max) else force_max
 
-        x_res, y_res = MapFetchHandler.__get_xy_resolution(nexus_tiles[0])
+        x_res, y_res = MapFetchCalcHandler.__get_xy_resolution(nexus_tiles[0])
         x_res = 1
         y_res = 1
 
@@ -158,9 +158,9 @@ class MapFetchHandler(BaseHandler):
         img_data = img.getdata()
 
         for tile in nexus_tiles:
-            MapFetchHandler.__tile_to_image(img_data, tile, data_min, data_max, table, x_res, y_res)
+            MapFetchCalcHandler.__tile_to_image(img_data, tile, data_min, data_max, table, x_res, y_res)
 
-        final_image = img.resize((width, height), MapFetchHandler.__translate_interpolation(interpolation))
+        final_image = img.resize((width, height), MapFetchCalcHandler.__translate_interpolation(interpolation))
 
         return final_image
 
@@ -188,13 +188,13 @@ class MapFetchHandler(BaseHandler):
             for y in range(0, img.height):
                 if data[x + (y * img.width)][3] == 255:
                     value = data[x + (y * img.width)][0]
-                    rgba = MapFetchHandler.__get_color(value, table)
+                    rgba = MapFetchCalcHandler.__get_color(value, table)
                     data.putpixel((x, y), (rgba[0], rgba[1], rgba[2], 255))
 
     @staticmethod
     def __create_no_data(width, height):
 
-        if MapFetchHandler.NO_DATA_IMAGE is None:
+        if MapFetchCalcHandler.NO_DATA_IMAGE is None:
             img = Image.new("RGBA", (width, height), (0, 0, 0, 0))
             draw = ImageDraw.Draw(img)
 
@@ -203,9 +203,9 @@ class MapFetchHandler(BaseHandler):
             for x in range(10, width, 100):
                 for y in range(10, height, 100):
                     draw.text((x, y), "NO DATA", (180, 180, 180), font=fnt)
-            MapFetchHandler.NO_DATA_IMAGE = img
+            MapFetchCalcHandler.NO_DATA_IMAGE = img
 
-        return MapFetchHandler.NO_DATA_IMAGE
+        return MapFetchCalcHandler.NO_DATA_IMAGE
 
     def calc(self, computeOptions, **args):
         ds = computeOptions.get_argument("ds", None)
diff --git a/analysis/webservice/algorithms/NexusCalcHandler.py b/analysis/webservice/algorithms/NexusCalcHandler.py
new file mode 100644
index 0000000..b5f220f
--- /dev/null
+++ b/analysis/webservice/algorithms/NexusCalcHandler.py
@@ -0,0 +1,77 @@
+import time
+import types
+
+from nexustiles.nexustiles import NexusTileService
+
+
class NexusCalcHandler(object):
    """Base class for NEXUS web-service calculation handlers.

    Concrete handlers declare the class attributes ``name``, ``path``,
    ``description`` and ``params`` and implement :meth:`calc`;
    :meth:`validate` enforces that contract when the handler is registered.
    Each instance owns its own ``NexusTileService`` (no shared singleton) so
    concurrent requests do not share connection state (SDAP-252).
    """

    @classmethod
    def validate(cls):
        """Raise Exception if the subclass is missing calc() or a required attribute."""
        # calc must be defined directly on the subclass, not inherited.
        if not isinstance(cls.__dict__.get("calc"), types.FunctionType):
            raise Exception("Method 'calc' has not been declared")

        # Every concrete handler must advertise these for request routing/UI.
        for prop in ("path", "name", "description", "params"):
            if prop not in cls.__dict__:
                raise Exception("Property '%s' has not been defined" % prop)

    def __init__(self, algorithm_config=None, skipCassandra=False, skipSolr=False):
        """Create the handler and its backing tile service.

        algorithm_config: optional ConfigParser-style config forwarded to
            NexusTileService.
        skipCassandra: when True, do not connect to the Cassandra data store.
        skipSolr: when True, do not connect to the Solr metadata store.
        """
        self.algorithm_config = algorithm_config
        self._skipCassandra = skipCassandra
        self._skipSolr = skipSolr
        self._tile_service = NexusTileService(skipDatastore=self._skipCassandra,
                                              skipMetadatastore=self._skipSolr,
                                              config=self.algorithm_config)

    def _get_tile_service(self):
        """Return this handler's private NexusTileService instance."""
        return self._tile_service

    def calc(self, computeOptions, **args):
        """Compute the handler's result; concrete subclasses must override."""
        # NotImplementedError (an Exception subclass, so existing broad
        # handlers still catch it) is the idiomatic signal for an abstract method.
        raise NotImplementedError("calc() not yet implemented")

    def _mergeDicts(self, x, y):
        """Return a new dict with y's entries overriding x's; neither input is mutated."""
        merged = x.copy()
        merged.update(y)
        return merged

    def _now(self):
        """Current time as integer milliseconds since the epoch."""
        return int(round(time.time() * 1000))

    def _mergeDataSeries(self, resultsData, dataNum, resultsMap):
        """Group one dataset's entries into resultsMap keyed by entry["time"].

        Each entry is tagged in place with its dataset index via entry["ds"].
        """
        for entry in resultsData:
            entry["ds"] = dataNum
            resultsMap.setdefault(entry["time"], []).append(entry)

    def _resultsMapToList(self, resultsMap):
        """Flatten the time->entries map into a list of groups sorted by time."""
        # Keys are redundant (each group's entries carry the same "time");
        # iterating values directly is also Python-3 compatible, unlike the
        # former dict.iteritems().
        return sorted(resultsMap.values(), key=lambda entries: entries[0]["time"])

    def _mergeResults(self, resultsRaw):
        """Merge several datasets' raw series into one time-sorted list.

        resultsRaw: sequence where element i is a tuple/list whose first item
            is dataset i's list of entry dicts.
        """
        resultsMap = {}
        for ds_index, resultsSeries in enumerate(resultsRaw):
            self._mergeDataSeries(resultsSeries[0], ds_index, resultsMap)
        return self._resultsMapToList(resultsMap)
diff --git a/analysis/webservice/algorithms/StandardDeviationSearch.py b/analysis/webservice/algorithms/StandardDeviationSearch.py
index 0f924c4..231c687 100644
--- a/analysis/webservice/algorithms/StandardDeviationSearch.py
+++ b/analysis/webservice/algorithms/StandardDeviationSearch.py
@@ -22,7 +22,8 @@ from functools import partial
 from nexustiles.nexustiles import NexusTileServiceException
 from pytz import timezone
 
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusProcessingException, CustomEncoder
 
 SENTINEL = 'STOP'
@@ -30,7 +31,7 @@ EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
 
 
 @nexus_handler
-class StandardDeviationSearchHandlerImpl(NexusHandler):
+class StandardDeviationSearchCalcHandlerImpl(NexusCalcHandler):
     name = "Standard Deviation Search"
     path = "/standardDeviation"
     description = "Retrieves the pixel standard deviation if it exists for a given longitude and latitude"
@@ -73,7 +74,7 @@ class StandardDeviationSearchHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
@@ -121,7 +122,7 @@ class StandardDeviationSearchHandlerImpl(NexusHandler):
                            latitude=latitude, day_of_year=day_of_year)
 
         try:
-            results = StandardDeviationSearchHandlerImpl.to_result_dict(func())
+            results = StandardDeviationSearchCalcHandlerImpl.to_result_dict(func())
         except (NoTileException, NoStandardDeviationException):
             return StandardDeviationSearchResult(raw_args_dict, [])
 
diff --git a/analysis/webservice/algorithms/TileSearch.py b/analysis/webservice/algorithms/TileSearch.py
index 8bfcdbb..a3758bc 100644
--- a/analysis/webservice/algorithms/TileSearch.py
+++ b/analysis/webservice/algorithms/TileSearch.py
@@ -14,12 +14,12 @@
 # limitations under the License.
 
 
-from webservice.NexusHandler import NexusHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults
 
 
 # @nexus_handler
-class ChunkSearchHandlerImpl(NexusHandler):
+class ChunkSearchCalcHandlerImpl(NexusCalcHandler):
     name = "Data Tile Search"
     path = "/tiles"
     description = "Lists dataset tiles given a geographical area and time range"
@@ -63,7 +63,7 @@ class ChunkSearchHandlerImpl(NexusHandler):
     }
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
 
     def calc(self, computeOptions, **args):
         minLat = computeOptions.get_min_lat()
diff --git a/analysis/webservice/algorithms/TimeAvgMap.py b/analysis/webservice/algorithms/TimeAvgMap.py
index ce8085a..3a609c5 100644
--- a/analysis/webservice/algorithms/TimeAvgMap.py
+++ b/analysis/webservice/algorithms/TimeAvgMap.py
@@ -20,7 +20,8 @@
 import sys
 import numpy as np
 from time import time
-from webservice.NexusHandler import NexusHandler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NoDataException
 from netCDF4 import Dataset
 
@@ -29,7 +30,7 @@ from netCDF4 import Dataset
 
 
 # @nexus_handler
-class TimeAvgMapHandlerImpl(NexusHandler):
+class TimeAvgMapCalcHandlerImpl(NexusCalcHandler):
     name = "Time Average Map"
     path = "/timeAvgMap"
     description = "Computes a Latitude/Longitude Time Average plot given an arbitrary geographical area and time range"
@@ -37,7 +38,7 @@ class TimeAvgMapHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=False)
+        NexusCalcHandler.__init__(self, skipCassandra=False)
 
     def _find_native_resolution(self):
         # Get a quick set of tiles (1 degree at center of box) at 1 time stamp
diff --git a/analysis/webservice/algorithms/TimeSeries.py b/analysis/webservice/algorithms/TimeSeries.py
index 405e567..85613d9 100644
--- a/analysis/webservice/algorithms/TimeSeries.py
+++ b/analysis/webservice/algorithms/TimeSeries.py
@@ -33,7 +33,8 @@ from pytz import timezone
 from scipy import stats
 
 from webservice import Filtering as filtering
-from webservice.NexusHandler import NexusHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 SENTINEL = 'STOP'
@@ -42,7 +43,7 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class TimeSeriesHandlerImpl(NexusHandler):
+class TimeSeriesCalcHandlerImpl(NexusCalcHandler):
     name = "Time Series"
     path = "/stats"
     description = "Computes a time series plot between one or more datasets given an arbitrary geographical area and time range"
@@ -84,7 +85,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self)
+        NexusCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
@@ -180,7 +181,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
 
         if len(ds) == 2:
             try:
-                stats = TimeSeriesHandlerImpl.calculate_comparison_stats(results)
+                stats = TimeSeriesCalcHandlerImpl.calculate_comparison_stats(results)
             except Exception:
                 stats = {}
                 tb = traceback.format_exc()
diff --git a/analysis/webservice/algorithms/TimeSeriesSolr.py b/analysis/webservice/algorithms/TimeSeriesSolr.py
index 2cc8d37..fbe4d43 100644
--- a/analysis/webservice/algorithms/TimeSeriesSolr.py
+++ b/analysis/webservice/algorithms/TimeSeriesSolr.py
@@ -27,14 +27,15 @@ from nexustiles.nexustiles import NexusTileService
 from scipy import stats
 
 from webservice import Filtering as filt
-from webservice.NexusHandler import NexusHandler, nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 SENTINEL = 'STOP'
 
 
 @nexus_handler
-class TimeSeriesHandlerImpl(NexusHandler):
+class TimeSeriesCalcHandlerImpl(NexusCalcHandler):
     name = "Time Series Solr"
     path = "/statsSolr"
     description = "Computes a time series plot between one or more datasets given an arbitrary geographical area and time range"
@@ -42,7 +43,7 @@ class TimeSeriesHandlerImpl(NexusHandler):
     singleton = True
 
     def __init__(self):
-        NexusHandler.__init__(self, skipCassandra=True)
+        NexusCalcHandler.__init__(self, skipCassandra=True)
         self.log = logging.getLogger(__name__)
 
     def calc(self, computeOptions, **args):
diff --git a/analysis/webservice/algorithms/doms/BaseDomsHandler.py b/analysis/webservice/algorithms/doms/BaseDomsHandler.py
index c9b8acf..d07f929 100644
--- a/analysis/webservice/algorithms/doms/BaseDomsHandler.py
+++ b/analysis/webservice/algorithms/doms/BaseDomsHandler.py
@@ -26,7 +26,7 @@ from pytz import timezone, UTC
 
 import config
 import geo
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.webmodel import NexusResults
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -44,7 +44,7 @@ import netCDF4
 import tempfile
 
 
-class BaseDomsQueryHandler(BaseHandler):
+class BaseDomsQueryCalcHandler(BaseHandler):
     def __init__(self):
         BaseHandler.__init__(self)
 
diff --git a/analysis/webservice/algorithms/doms/DatasetListQuery.py b/analysis/webservice/algorithms/doms/DatasetListQuery.py
index 7c418c9..ac7f263 100644
--- a/analysis/webservice/algorithms/doms/DatasetListQuery.py
+++ b/analysis/webservice/algorithms/doms/DatasetListQuery.py
@@ -21,13 +21,13 @@ import requests
 import BaseDomsHandler
 import config
 import values
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 from webservice.webmodel import cached
 
 
 @nexus_handler
-class DomsDatasetListQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsDatasetListQueryHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS Dataset Listing"
     path = "/domslist"
     description = ""
diff --git a/analysis/webservice/algorithms/doms/DomsInitialization.py b/analysis/webservice/algorithms/doms/DomsInitialization.py
index 21832c1..2d429ca 100644
--- a/analysis/webservice/algorithms/doms/DomsInitialization.py
+++ b/analysis/webservice/algorithms/doms/DomsInitialization.py
@@ -20,11 +20,10 @@ import logging
 
 import pkg_resources
 from cassandra.cluster import Cluster
-from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy
+from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy, WhiteListRoundRobinPolicy
 
 from webservice.NexusHandler import nexus_initializer
 
-
 @nexus_initializer
 class DomsInitializer:
     def __init__(self):
@@ -34,24 +33,29 @@ class DomsInitializer:
         log = logging.getLogger(__name__)
         log.info("*** STARTING DOMS INITIALIZATION ***")
 
-        domsconfig = ConfigParser.RawConfigParser()
-        domsconfig.readfp(pkg_resources.resource_stream(__name__, "domsconfig.ini"), filename='domsconfig.ini')
+        domsconfig = ConfigParser.SafeConfigParser()
+        domsconfig.read(DomsInitializer._get_config_files('domsconfig.ini'))
 
         cassHost = domsconfig.get("cassandra", "host")
         cassPort = domsconfig.get("cassandra", "port")
         cassKeyspace = domsconfig.get("cassandra", "keyspace")
         cassDatacenter = domsconfig.get("cassandra", "local_datacenter")
         cassVersion = int(domsconfig.get("cassandra", "protocol_version"))
+        cassPolicy = domsconfig.get("cassandra", "dc_policy")
 
         log.info("Cassandra Host(s): %s" % (cassHost))
         log.info("Cassandra Keyspace: %s" % (cassKeyspace))
         log.info("Cassandra Datacenter: %s" % (cassDatacenter))
         log.info("Cassandra Protocol Version: %s" % (cassVersion))
+        log.info("Cassandra DC Policy: %s" % (cassPolicy))
 
-        dc_policy = DCAwareRoundRobinPolicy(cassDatacenter)
+        if cassPolicy == 'DCAwareRoundRobinPolicy':
+            dc_policy = DCAwareRoundRobinPolicy(cassDatacenter)
+        elif cassPolicy == 'WhiteListRoundRobinPolicy':
+            dc_policy = WhiteListRoundRobinPolicy([cassHost])
         token_policy = TokenAwarePolicy(dc_policy)
 
-        with Cluster([host for host in cassHost.split(',')], port=cassPort, load_balancing_policy=token_policy,
+        with Cluster([host for host in cassHost.split(',')], port=int(cassPort), load_balancing_policy=token_policy,
                      protocol_version=cassVersion) as cluster:
             session = cluster.connect()
 
@@ -144,3 +148,17 @@ class DomsInitializer:
             );
         """
         session.execute(cql)
+
+    @staticmethod
+    def _get_config_files(filename):
+        log = logging.getLogger(__name__)
+        candidates = []
+        extensions = ['.default', '']
+        for extension in extensions:
+            try:
+                candidate = pkg_resources.resource_filename(__name__, filename + extension)
+                candidates.append(candidate)
+            except KeyError as ke:
+                log.warning('configuration file {} not found'.format(filename + extension))
+
+        return candidates
diff --git a/analysis/webservice/algorithms/doms/MatchupQuery.py b/analysis/webservice/algorithms/doms/MatchupQuery.py
index cff23a0..57a0834 100644
--- a/analysis/webservice/algorithms/doms/MatchupQuery.py
+++ b/analysis/webservice/algorithms/doms/MatchupQuery.py
@@ -32,7 +32,7 @@ from webservice.NexusHandler import nexus_handler
 
 
 @nexus_handler
-class CombinedDomsMatchupQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class CombinedDomsMatchupQueryHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "Experimental Combined DOMS In-Situ Matchup"
     path = "/domsmatchup"
     description = ""
@@ -40,7 +40,7 @@ class CombinedDomsMatchupQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
     singleton = True
 
     def __init__(self):
-        BaseDomsHandler.BaseDomsQueryHandler.__init__(self)
+        BaseDomsHandler.BaseDomsQueryCalcHandler.__init__(self)
 
     def fetchData(self, endpoints, startTime, endTime, bbox, depth_min, depth_max, platforms):
 
diff --git a/analysis/webservice/algorithms/doms/MetadataQuery.py b/analysis/webservice/algorithms/doms/MetadataQuery.py
index 21942a5..aa24d91 100644
--- a/analysis/webservice/algorithms/doms/MetadataQuery.py
+++ b/analysis/webservice/algorithms/doms/MetadataQuery.py
@@ -19,13 +19,13 @@ import requests
 
 import BaseDomsHandler
 import config
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 from webservice.webmodel import DatasetNotFoundException
 
 
 @nexus_handler
-class DomsMetadataQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsMetadataQueryHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS Metadata Listing"
     path = "/domsmetadata"
     description = ""
diff --git a/analysis/webservice/algorithms/doms/ResultsPlotQuery.py b/analysis/webservice/algorithms/doms/ResultsPlotQuery.py
index b7b308a..1b48d14 100644
--- a/analysis/webservice/algorithms/doms/ResultsPlotQuery.py
+++ b/analysis/webservice/algorithms/doms/ResultsPlotQuery.py
@@ -27,7 +27,7 @@ class PlotTypes:
 
 
 @nexus_handler
-class DomsResultsPlotHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsResultsPlotHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS Results Plotting"
     path = "/domsplot"
     description = ""
@@ -35,7 +35,7 @@ class DomsResultsPlotHandler(BaseDomsHandler.BaseDomsQueryHandler):
     singleton = True
 
     def __init__(self):
-        BaseDomsHandler.BaseDomsQueryHandler.__init__(self)
+        BaseDomsHandler.BaseDomsQueryCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         id = computeOptions.get_argument("id", None)
diff --git a/analysis/webservice/algorithms/doms/ResultsRetrieval.py b/analysis/webservice/algorithms/doms/ResultsRetrieval.py
index 76e5fe2..93358e9 100644
--- a/analysis/webservice/algorithms/doms/ResultsRetrieval.py
+++ b/analysis/webservice/algorithms/doms/ResultsRetrieval.py
@@ -22,7 +22,7 @@ from webservice.webmodel import NexusProcessingException
 
 
 @nexus_handler
-class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS Resultset Retrieval"
     path = "/domsresults"
     description = ""
@@ -30,7 +30,7 @@ class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
     singleton = True
 
     def __init__(self):
-        BaseDomsHandler.BaseDomsQueryHandler.__init__(self)
+        BaseDomsHandler.BaseDomsQueryCalcHandler.__init__(self)
 
     def calc(self, computeOptions, **args):
         execution_id = computeOptions.get_argument("id", None)
diff --git a/analysis/webservice/algorithms/doms/StatsQuery.py b/analysis/webservice/algorithms/doms/StatsQuery.py
index 33ffbf9..f5ac765 100644
--- a/analysis/webservice/algorithms/doms/StatsQuery.py
+++ b/analysis/webservice/algorithms/doms/StatsQuery.py
@@ -15,12 +15,12 @@
 
 import BaseDomsHandler
 import datafetch
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 
 
 @nexus_handler
-class DomsStatsQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsStatsQueryHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS In-Situ Stats Lookup"
     path = "/domsstats"
     description = ""
diff --git a/analysis/webservice/algorithms/doms/ValuesQuery.py b/analysis/webservice/algorithms/doms/ValuesQuery.py
index 1323ab5..d766c7b 100644
--- a/analysis/webservice/algorithms/doms/ValuesQuery.py
+++ b/analysis/webservice/algorithms/doms/ValuesQuery.py
@@ -19,14 +19,14 @@ from pytz import timezone
 
 import BaseDomsHandler
 import datafetch
-from webservice.NexusHandler import NexusHandler as BaseHandler
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
 from webservice.NexusHandler import nexus_handler
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
 
 
 @nexus_handler
-class DomsValuesQueryHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsValuesQueryHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS In-Situ Value Lookup"
     path = "/domsvalues"
     description = ""
diff --git a/analysis/webservice/algorithms/doms/domsconfig.ini b/analysis/webservice/algorithms/doms/domsconfig.ini.default
similarity index 89%
rename from analysis/webservice/algorithms/doms/domsconfig.ini
rename to analysis/webservice/algorithms/doms/domsconfig.ini.default
index 34712b4..d1814bf 100644
--- a/analysis/webservice/algorithms/doms/domsconfig.ini
+++ b/analysis/webservice/algorithms/doms/domsconfig.ini.default
@@ -4,6 +4,7 @@ port=9042
 keyspace=doms
 local_datacenter=datacenter1
 protocol_version=3
+dc_policy=DCAwareRoundRobinPolicy
 
 
 [cassandraDD]
diff --git a/analysis/webservice/algorithms/doms/histogramplot.py b/analysis/webservice/algorithms/doms/histogramplot.py
index 49015b7..1e06b66 100644
--- a/analysis/webservice/algorithms/doms/histogramplot.py
+++ b/analysis/webservice/algorithms/doms/histogramplot.py
@@ -25,7 +25,8 @@ import numpy as np
 import BaseDomsHandler
 import ResultsStorage
 
-matplotlib.use('Agg')
+if not matplotlib.get_backend():
+    matplotlib.use('Agg')
 
 PARAMETER_TO_FIELD = {
     "sst": "sea_water_temperature",
diff --git a/analysis/webservice/algorithms/doms/insitusubset.py b/analysis/webservice/algorithms/doms/insitusubset.py
index ae11013..7f60e99 100644
--- a/analysis/webservice/algorithms/doms/insitusubset.py
+++ b/analysis/webservice/algorithms/doms/insitusubset.py
@@ -30,7 +30,7 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS In Situ Subsetter"
     path = "/domsinsitusubset"
     description = "Subset a DOMS in situ source given the search domain."
@@ -109,7 +109,7 @@ class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
     singleton = True
 
     def __init__(self):
-        BaseDomsHandler.BaseDomsQueryHandler.__init__(self)
+        BaseDomsHandler.BaseDomsQueryCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
diff --git a/analysis/webservice/algorithms/doms/mapplot.py b/analysis/webservice/algorithms/doms/mapplot.py
index 0c6faa4..3af85d3 100644
--- a/analysis/webservice/algorithms/doms/mapplot.py
+++ b/analysis/webservice/algorithms/doms/mapplot.py
@@ -25,7 +25,8 @@ from mpl_toolkits.basemap import Basemap
 import BaseDomsHandler
 import ResultsStorage
 
-matplotlib.use('Agg')
+if not matplotlib.get_backend():
+    matplotlib.use('Agg')
 
 PARAMETER_TO_FIELD = {
     "sst": "sea_water_temperature",
diff --git a/analysis/webservice/algorithms/doms/scatterplot.py b/analysis/webservice/algorithms/doms/scatterplot.py
index 6d363d8..2ff57ee 100644
--- a/analysis/webservice/algorithms/doms/scatterplot.py
+++ b/analysis/webservice/algorithms/doms/scatterplot.py
@@ -23,7 +23,8 @@ import matplotlib.pyplot as plt
 import BaseDomsHandler
 import ResultsStorage
 
-matplotlib.use('Agg')
+if not matplotlib.get_backend():
+    matplotlib.use('Agg')
 
 PARAMETER_TO_FIELD = {
     "sst": "sea_water_temperature",
diff --git a/analysis/webservice/algorithms/doms/subsetter.py b/analysis/webservice/algorithms/doms/subsetter.py
index 8cda152..67a2276 100644
--- a/analysis/webservice/algorithms/doms/subsetter.py
+++ b/analysis/webservice/algorithms/doms/subsetter.py
@@ -33,7 +33,7 @@ def is_blank(my_string):
 
 
 @nexus_handler
-class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
+class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryCalcHandler):
     name = "DOMS Subsetter"
     path = "/domssubset"
     description = "Subset DOMS sources given the search domain"
@@ -94,7 +94,7 @@ class DomsResultsRetrievalHandler(BaseDomsHandler.BaseDomsQueryHandler):
     singleton = True
 
     def __init__(self):
-        BaseDomsHandler.BaseDomsQueryHandler.__init__(self)
+        BaseDomsHandler.BaseDomsQueryCalcHandler.__init__(self)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
diff --git a/analysis/webservice/algorithms_spark/ClimMapSpark.py b/analysis/webservice/algorithms_spark/ClimMapSpark.py
index 75c7b73..e870a2a 100644
--- a/analysis/webservice/algorithms_spark/ClimMapSpark.py
+++ b/analysis/webservice/algorithms_spark/ClimMapSpark.py
@@ -22,22 +22,17 @@ from datetime import datetime
 import numpy as np
 from nexustiles.nexustiles import NexusTileService
 
-from webservice.NexusHandler import nexus_handler, SparkHandler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 
 @nexus_handler
-class ClimMapSparkHandlerImpl(SparkHandler):
+class ClimMapNexusSparkHandlerImpl(NexusCalcSparkHandler):
     name = "Climatology Map Spark"
     path = "/climMapSpark"
     description = "Computes a Latitude/Longitude Time Average map for a given month given an arbitrary geographical area and year range"
     params = DEFAULT_PARAMETERS_SPEC
-    singleton = True
-
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
-        # self.log.setLevel(logging.DEBUG)
 
     @staticmethod
     def _map(tile_in_spark):
@@ -66,13 +61,13 @@ class ClimMapSparkHandlerImpl(SparkHandler):
             # print 'nexus call start at time %f' % t1
             # sys.stdout.flush()
             nexus_tiles = \
-                ClimMapSparkHandlerImpl.query_by_parts(tile_service,
-                                                       min_lat, max_lat,
-                                                       min_lon, max_lon,
-                                                       ds,
-                                                       t_start,
-                                                       t_end,
-                                                       part_dim=2)
+                ClimMapNexusSparkHandlerImpl.query_by_parts(tile_service,
+                                                            min_lat, max_lat,
+                                                            min_lon, max_lon,
+                                                            ds,
+                                                            t_start,
+                                                            t_end,
+                                                            part_dim=2)
             # nexus_tiles = \
             #    tile_service.get_tiles_bounded_by_box(min_lat, max_lat, 
             #                                          min_lon, max_lon, 
diff --git a/analysis/webservice/algorithms_spark/CorrMapSpark.py b/analysis/webservice/algorithms_spark/CorrMapSpark.py
index 6627536..1af8cab 100644
--- a/analysis/webservice/algorithms_spark/CorrMapSpark.py
+++ b/analysis/webservice/algorithms_spark/CorrMapSpark.py
@@ -22,22 +22,17 @@ import numpy as np
 from nexustiles.nexustiles import NexusTileService
 
 # from time import time
-from webservice.NexusHandler import nexus_handler, SparkHandler, DEFAULT_PARAMETERS_SPEC
+from webservice.NexusHandler import nexus_handler, DEFAULT_PARAMETERS_SPEC
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusProcessingException, NexusResults, NoDataException
 
 
 @nexus_handler
-class CorrMapSparkHandlerImpl(SparkHandler):
+class CorrMapNexusSparkHandlerImpl(NexusCalcSparkHandler):
     name = "Correlation Map Spark"
     path = "/corrMapSpark"
     description = "Computes a correlation map between two datasets given an arbitrary geographical area and time range"
     params = DEFAULT_PARAMETERS_SPEC
-    singleton = True
-
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
-        # self.log.setLevel(logging.DEBUG)
 
     @staticmethod
     def _map(tile_in):
diff --git a/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py b/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
index d164532..51be431 100644
--- a/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
+++ b/analysis/webservice/algorithms_spark/DailyDifferenceAverageSpark.py
@@ -23,7 +23,8 @@ from nexustiles.nexustiles import NexusTileService
 from shapely import wkt
 from shapely.geometry import Polygon
 
-from webservice.NexusHandler import nexus_handler, SparkHandler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NexusProcessingException
 
 SENTINEL = 'STOP'
@@ -37,7 +38,7 @@ def iso_time_to_epoch(str_time):
 
 
 @nexus_handler
-class DailyDifferenceAverageSparkImpl(SparkHandler):
+class DailyDifferenceAverageNexusImplSpark(NexusCalcSparkHandler):
     name = "Daily Difference Average Spark"
     path = "/dailydifferenceaverage_spark"
     description = "Subtracts data in box in Dataset 1 from Dataset 2, then averages the difference per day."
@@ -70,8 +71,8 @@ class DailyDifferenceAverageSparkImpl(SparkHandler):
     }
     singleton = True
 
-    def __init__(self):
-        SparkHandler.__init__(self, skipCassandra=True)
+    def __init__(self, **kwargs):
+        NexusCalcSparkHandler.__init__(self, skipCassandra=True, **kwargs)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
diff --git a/analysis/webservice/algorithms_spark/HofMoellerSpark.py b/analysis/webservice/algorithms_spark/HofMoellerSpark.py
index 12320b1..c4bc019 100644
--- a/analysis/webservice/algorithms_spark/HofMoellerSpark.py
+++ b/analysis/webservice/algorithms_spark/HofMoellerSpark.py
@@ -27,7 +27,8 @@ from matplotlib import cm
 from matplotlib.ticker import FuncFormatter
 from nexustiles.nexustiles import NexusTileService
 from pytz import timezone
-from webservice.NexusHandler import SparkHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NoDataException, NexusProcessingException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -100,7 +101,7 @@ class HofMoellerCalculator(object):
         return stats
 
 
-class BaseHoffMoellerHandlerImpl(SparkHandler):
+class BaseHoffMoellerSparkHandlerImpl(NexusCalcSparkHandler):
     params = {
         "ds": {
             "name": "Dataset",
@@ -132,10 +133,6 @@ class BaseHoffMoellerHandlerImpl(SparkHandler):
         }
     }
 
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
-
     def parse_arguments(self, request):
         # Parse input arguments
         self.log.debug("Parsing arguments")
@@ -331,16 +328,16 @@ def spark_driver(sc, latlon, nexus_tiles_spark, metrics_callback):
 
 
 @nexus_handler
-class LatitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerHandlerImpl):
+class LatitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerSparkHandlerImpl):
     name = "Latitude/Time HofMoeller Spark"
     path = "/latitudeTimeHofMoellerSpark"
     description = "Computes a latitude/time HofMoeller plot given an arbitrary geographical area and time range"
-    params = BaseHoffMoellerHandlerImpl.params
+    params = BaseHoffMoellerSparkHandlerImpl.params
     singleton = True
 
-    def __init__(self):
+    def __init__(self, **kwargs):
         self._latlon = 0  # 0 for latitude-time map, 1 for longitude-time map
-        BaseHoffMoellerHandlerImpl.__init__(self)
+        BaseHoffMoellerSparkHandlerImpl.__init__(self, **kwargs)
 
     def calc(self, compute_options, **args):
         ds, bbox, start_time, end_time = self.parse_arguments(compute_options)
@@ -382,16 +379,16 @@ class LatitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerHandlerImpl):
 
 
 @nexus_handler
-class LongitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerHandlerImpl):
+class LongitudeTimeHoffMoellerSparkHandlerImpl(BaseHoffMoellerSparkHandlerImpl):
     name = "Longitude/Time HofMoeller Spark"
     path = "/longitudeTimeHofMoellerSpark"
     description = "Computes a longitude/time HofMoeller plot given an arbitrary geographical area and time range"
-    params = BaseHoffMoellerHandlerImpl.params
+    params = BaseHoffMoellerSparkHandlerImpl.params
     singleton = True
 
-    def __init__(self):
+    def __init__(self, **kwargs):
         self._latlon = 1  # 0 for latitude-time map; 1 for longitude-time map
-        BaseHoffMoellerHandlerImpl.__init__(self)
+        BaseHoffMoellerSparkHandlerImpl.__init__(self, **kwargs)
 
     def calc(self, compute_options, **args):
         ds, bbox, start_time, end_time = self.parse_arguments(compute_options)
diff --git a/analysis/webservice/algorithms_spark/Matchup.py b/analysis/webservice/algorithms_spark/Matchup.py
index 17d14ec..9ae7557 100644
--- a/analysis/webservice/algorithms_spark/Matchup.py
+++ b/analysis/webservice/algorithms_spark/Matchup.py
@@ -33,7 +33,8 @@ from shapely.geometry import Point
 from shapely.geometry import box
 from shapely.geos import WKTReadingError
 
-from webservice.NexusHandler import SparkHandler, nexus_handler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.algorithms.doms import config as edge_endpoints
 from webservice.algorithms.doms import values as doms_values
 from webservice.algorithms.doms.BaseDomsHandler import DomsQueryResults
@@ -50,7 +51,7 @@ def iso_time_to_epoch(str_time):
 
 
 @nexus_handler
-class Matchup(SparkHandler):
+class Matchup(NexusCalcSparkHandler):
     name = "Matchup"
     path = "/match_spark"
     description = "Match measurements between two or more datasets"
@@ -130,8 +131,8 @@ class Matchup(SparkHandler):
     }
     singleton = True
 
-    def __init__(self):
-        SparkHandler.__init__(self, skipCassandra=True)
+    def __init__(self, algorithm_config=None, sc=None):
+        NexusCalcSparkHandler.__init__(self, algorithm_config=algorithm_config, sc=sc, skipCassandra=True)
         self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
diff --git a/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py b/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
index 5cbf115..3bd9698 100644
--- a/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
+++ b/analysis/webservice/algorithms_spark/MaximaMinimaSpark.py
@@ -23,7 +23,8 @@ import shapely.geometry
 from nexustiles.nexustiles import NexusTileService
 from pytz import timezone
 
-from webservice.NexusHandler import nexus_handler, SparkHandler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -31,7 +32,7 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class MaximaMinimaSparkHandlerImpl(SparkHandler):
+class MaximaMinimaSparkHandlerImpl(NexusCalcSparkHandler):
     name = "Maxima and Minima Map Spark"
     path = "/maxMinMapSpark"
     description = "Computes a map of maxmima and minima of a field given an arbitrary geographical area and time range"
@@ -65,11 +66,6 @@ class MaximaMinimaSparkHandlerImpl(SparkHandler):
                            "Number of Spark Partitions is used by this function. Optional (Default: local,1,1)"
         }
     }
-    singleton = True
-
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
         # Parse input arguments
diff --git a/analysis/webservice/NexusHandler.py b/analysis/webservice/algorithms_spark/NexusCalcSparkHandler.py
similarity index 51%
copy from analysis/webservice/NexusHandler.py
copy to analysis/webservice/algorithms_spark/NexusCalcSparkHandler.py
index 1fc035d..12b84c1 100644
--- a/analysis/webservice/NexusHandler.py
+++ b/analysis/webservice/algorithms_spark/NexusCalcSparkHandler.py
@@ -1,273 +1,13 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
 import logging
-import time
-import types
 
 import numpy as np
-from netCDF4 import Dataset
-from nexustiles.nexustiles import NexusTileService
-from webservice.metrics.MetricsField import NumberMetricsField, SparkAccumulatorMetricsField
-from webservice.metrics.MetricsRecord import MetricsRecord
+from netCDF4._netCDF4 import Dataset
+from webservice.algorithms.NexusCalcHandler import NexusCalcHandler
+from webservice.metrics import MetricsRecord, SparkAccumulatorMetricsField, NumberMetricsField
 from webservice.webmodel import NexusProcessingException
 
-AVAILABLE_HANDLERS = []
-AVAILABLE_INITIALIZERS = []
-
-
-def nexus_initializer(clazz):
-    log = logging.getLogger(__name__)
-    try:
-        wrapper = NexusInitializerWrapper(clazz)
-        log.info("Adding initializer '%s'" % wrapper.clazz())
-        AVAILABLE_INITIALIZERS.append(wrapper)
-    except Exception as ex:
-        log.warn("Initializer '%s' failed to load (reason: %s)" % (clazz, ex.message), exc_info=True)
-    return clazz
-
-
-def nexus_handler(clazz):
-    log = logging.getLogger(__name__)
-    try:
-        wrapper = AlgorithmModuleWrapper(clazz)
-        log.info("Adding algorithm module '%s' with path '%s' (%s)" % (wrapper.name(), wrapper.path(), wrapper.clazz()))
-        AVAILABLE_HANDLERS.append(wrapper)
-    except Exception as ex:
-        log.warn("Handler '%s' is invalid and will be skipped (reason: %s)" % (clazz, ex.message), exc_info=True)
-    return clazz
-
-
-DEFAULT_PARAMETERS_SPEC = {
-    "ds": {
-        "name": "Dataset",
-        "type": "string",
-        "description": "One or more comma-separated dataset shortnames"
-    },
-    "minLat": {
-        "name": "Minimum Latitude",
-        "type": "float",
-        "description": "Minimum (Southern) bounding box Latitude"
-    },
-    "maxLat": {
-        "name": "Maximum Latitude",
-        "type": "float",
-        "description": "Maximum (Northern) bounding box Latitude"
-    },
-    "minLon": {
-        "name": "Minimum Longitude",
-        "type": "float",
-        "description": "Minimum (Western) bounding box Longitude"
-    },
-    "maxLon": {
-        "name": "Maximum Longitude",
-        "type": "float",
-        "description": "Maximum (Eastern) bounding box Longitude"
-    },
-    "startTime": {
-        "name": "Start Time",
-        "type": "long integer",
-        "description": "Starting time in milliseconds since midnight Jan. 1st, 1970 UTC"
-    },
-    "endTime": {
-        "name": "End Time",
-        "type": "long integer",
-        "description": "Ending time in milliseconds since midnight Jan. 1st, 1970 UTC"
-    },
-    "lowPassFilter": {
-        "name": "Apply Low Pass Filter",
-        "type": "boolean",
-        "description": "Specifies whether to apply a low pass filter on the analytics results"
-    },
-    "seasonalFilter": {
-        "name": "Apply Seasonal Filter",
-        "type": "boolean",
-        "description": "Specified whether to apply a seasonal cycle filter on the analytics results"
-    }
-}
-
-
-class NexusInitializerWrapper:
-    def __init__(self, clazz):
-        self.__log = logging.getLogger(__name__)
-        self.__hasBeenRun = False
-        self.__clazz = clazz
-        self.validate()
-
-    def validate(self):
-        if "init" not in self.__clazz.__dict__ or not type(self.__clazz.__dict__["init"]) == types.FunctionType:
-            raise Exception("Method 'init' has not been declared")
-
-    def clazz(self):
-        return self.__clazz
-
-    def hasBeenRun(self):
-        return self.__hasBeenRun
-
-    def init(self, config):
-        if not self.__hasBeenRun:
-            self.__hasBeenRun = True
-            instance = self.__clazz()
-            instance.init(config)
-        else:
-            self.log("Initializer '%s' has already been run" % self.__clazz)
-
-
-class AlgorithmModuleWrapper:
-    def __init__(self, clazz):
-        self.__instance = None
-        self.__clazz = clazz
-        self.validate()
-
-    def validate(self):
-        if "calc" not in self.__clazz.__dict__ or not type(self.__clazz.__dict__["calc"]) == types.FunctionType:
-            raise Exception("Method 'calc' has not been declared")
-
-        if "path" not in self.__clazz.__dict__:
-            raise Exception("Property 'path' has not been defined")
-
-        if "name" not in self.__clazz.__dict__:
-            raise Exception("Property 'name' has not been defined")
-
-        if "description" not in self.__clazz.__dict__:
-            raise Exception("Property 'description' has not been defined")
-
-        if "params" not in self.__clazz.__dict__:
-            raise Exception("Property 'params' has not been defined")
-
-    def clazz(self):
-        return self.__clazz
-
-    def name(self):
-        return self.__clazz.name
-
-    def path(self):
-        return self.__clazz.path
-
-    def description(self):
-        return self.__clazz.description
-
-    def params(self):
-        return self.__clazz.params
-
-    def instance(self, algorithm_config=None, sc=None):
-        if "singleton" in self.__clazz.__dict__ and self.__clazz.__dict__["singleton"] is True:
-            if self.__instance is None:
-                self.__instance = self.__clazz()
-
-                try:
-                    self.__instance.set_config(algorithm_config)
-                except AttributeError:
-                    pass
-
-                try:
-                    self.__instance.set_spark_context(sc)
-                except AttributeError:
-                    pass
-
-            return self.__instance
-        else:
-            instance = self.__clazz()
-
-            try:
-                instance.set_config(algorithm_config)
-            except AttributeError:
-                pass
-
-            try:
-                self.__instance.set_spark_context(sc)
-            except AttributeError:
-                pass
-            return instance
-
-    def isValid(self):
-        try:
-            self.validate()
-            return True
-        except Exception as ex:
-            return False
-
-
-class CalcHandler(object):
-    def calc(self, computeOptions, **args):
-        raise Exception("calc() not yet implemented")
 
-
-class NexusHandler(CalcHandler):
-    def __init__(self, skipCassandra=False, skipSolr=False):
-        CalcHandler.__init__(self)
-
-        self.algorithm_config = None
-        self._skipCassandra = skipCassandra
-        self._skipSolr = skipSolr
-        self.__tile_service = None  # instantiate the tile service after config is fully loaded
-
-    def set_config(self, algorithm_config):
-        self.algorithm_config = algorithm_config
-
-    def _get_tile_service(self):
-        if self.__tile_service is None:
-            self.__tile_service = NexusTileService(skipDatastore=self._skipCassandra,
-                                                   skipMetadatastore=self._skipSolr,
-                                                   config=self.algorithm_config)
-        return self.__tile_service
-
-
-    def _mergeDicts(self, x, y):
-        z = x.copy()
-        z.update(y)
-        return z
-
-    def _now(self):
-        millis = int(round(time.time() * 1000))
-        return millis
-
-    def _mergeDataSeries(self, resultsData, dataNum, resultsMap):
-
-        for entry in resultsData:
-
-            # frmtdTime = datetime.fromtimestamp(entry["time"] ).strftime("%Y-%m")
-            frmtdTime = entry["time"]
-
-            if not frmtdTime in resultsMap:
-                resultsMap[frmtdTime] = []
-            entry["ds"] = dataNum
-            resultsMap[frmtdTime].append(entry)
-
-    def _resultsMapToList(self, resultsMap):
-        resultsList = []
-        for key, value in resultsMap.iteritems():
-            resultsList.append(value)
-
-        resultsList = sorted(resultsList, key=lambda entry: entry[0]["time"])
-        return resultsList
-
-    def _mergeResults(self, resultsRaw):
-        resultsMap = {}
-
-        for i in range(0, len(resultsRaw)):
-            resultsSeries = resultsRaw[i]
-            resultsData = resultsSeries[0]
-            self._mergeDataSeries(resultsData, i, resultsMap)
-
-        resultsList = self._resultsMapToList(resultsMap)
-        return resultsList
-
-
-class SparkHandler(NexusHandler):
+class NexusCalcSparkHandler(NexusCalcHandler):
     class SparkJobContext(object):
 
         class MaxConcurrentJobsReached(Exception):
@@ -284,7 +24,7 @@ class SparkHandler(NexusHandler):
                 self.job_name = self.spark_job_stack.pop()
                 self.log.debug("Using %s" % self.job_name)
             except IndexError:
-                raise SparkHandler.SparkJobContext.MaxConcurrentJobsReached()
+                raise NexusCalcSparkHandler.SparkJobContext.MaxConcurrentJobsReached()
             return self
 
         def __exit__(self, exc_type, exc_val, exc_tb):
@@ -292,12 +32,16 @@ class SparkHandler(NexusHandler):
                 self.log.debug("Returning %s" % self.job_name)
                 self.spark_job_stack.append(self.job_name)
 
-    def __init__(self, **kwargs):
+    def __init__(self, algorithm_config=None, sc=None, **kwargs):
         import inspect
-        NexusHandler.__init__(self, **kwargs)
-        self._sc = None
 
+        NexusCalcHandler.__init__(self, algorithm_config=algorithm_config, **kwargs)
         self.spark_job_stack = []
+        self._sc = sc
+        max_concurrent_jobs = algorithm_config.getint("spark", "maxconcurrentjobs") if algorithm_config.has_section(
+            "spark") and algorithm_config.has_option("spark", "maxconcurrentjobs") else 10
+        self.spark_job_stack = list(["Job %s" % x for x in xrange(1, max_concurrent_jobs + 1)])
+        self.log = logging.getLogger(__name__)
 
         def with_spark_job_context(calc_func):
             from functools import wraps
@@ -305,12 +49,12 @@ class SparkHandler(NexusHandler):
             @wraps(calc_func)
             def wrapped(*args, **kwargs1):
                 try:
-                    with SparkHandler.SparkJobContext(self.spark_job_stack) as job_context:
+                    with NexusCalcSparkHandler.SparkJobContext(self.spark_job_stack) as job_context:
                         # TODO Pool and Job are forced to a 1-to-1 relationship
                         calc_func.im_self._sc.setLocalProperty("spark.scheduler.pool", job_context.job_name)
                         calc_func.im_self._sc.setJobGroup(job_context.job_name, "a spark job")
                         return calc_func(*args, **kwargs1)
-                except SparkHandler.SparkJobContext.MaxConcurrentJobsReached:
+                except NexusCalcSparkHandler.SparkJobContext.MaxConcurrentJobsReached:
                     raise NexusProcessingException(code=503,
                                                    reason="Max concurrent requests reached. Please try again later.")
 
@@ -320,15 +64,6 @@ class SparkHandler(NexusHandler):
             if member[0] == "calc":
                 setattr(self, member[0], with_spark_job_context(member[1]))
 
-    def set_spark_context(self, sc):
-        self._sc = sc
-
-    def set_config(self, algorithm_config):
-        max_concurrent_jobs = algorithm_config.getint("spark", "maxconcurrentjobs") if algorithm_config.has_section(
-            "spark") and algorithm_config.has_option("spark", "maxconcurrentjobs") else 10
-        self.spark_job_stack = list(["Job %s" % x for x in xrange(1, max_concurrent_jobs + 1)])
-        self.algorithm_config = algorithm_config
-
     def _setQueryParams(self, ds, bounds, start_time=None, end_time=None,
                         start_year=None, end_year=None, clim_month=None,
                         fill=-9999.):
@@ -387,7 +122,7 @@ class SparkHandler(NexusHandler):
             ds = self._ds
 
         # See what time stamps are in the specified range.
-        t_in_range = self._get_tile_service().find_days_in_range_asc(self._minLat,
+        t_in_range = self._tile_service.find_days_in_range_asc(self._minLat,
                                                                self._maxLat,
                                                                self._minLon,
                                                                self._maxLon,
@@ -403,7 +138,7 @@ class SparkHandler(NexusHandler):
         # Check one time stamp at a time and attempt to extract the global
         # tile set.
         for t in t_in_range:
-            nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
+            nexus_tiles = self._tile_service.get_tiles_bounded_by_box(self._minLat, self._maxLat, self._minLon,
                                                                       self._maxLon, ds=ds, start_time=t, end_time=t,
                                                                       metrics_callback=metrics_callback)
             if self._set_info_from_tile_set(nexus_tiles):
@@ -454,57 +189,57 @@ class SparkHandler(NexusHandler):
             if part_dim == 0:
                 # Partition by latitude.
                 mid_lat = (min_lat + max_lat) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, mid_lat,
-                                                          min_lon, max_lon,
-                                                          dataset,
-                                                          start_time, end_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               mid_lat,
-                                                               max_lat,
-                                                               min_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               start_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
+                nexus_tiles = NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                   min_lat, mid_lat,
+                                                                   min_lon, max_lon,
+                                                                   dataset,
+                                                                   start_time, end_time,
+                                                                   part_dim=part_dim)
+                nexus_tiles.extend(NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                        mid_lat,
+                                                                        max_lat,
+                                                                        min_lon,
+                                                                        max_lon,
+                                                                        dataset,
+                                                                        start_time,
+                                                                        end_time,
+                                                                        part_dim=part_dim))
             elif part_dim == 1:
                 # Partition by longitude.
                 mid_lon = (min_lon + max_lon) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, max_lat,
-                                                          min_lon, mid_lon,
-                                                          dataset,
-                                                          start_time, end_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               min_lat,
-                                                               max_lat,
-                                                               mid_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               start_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
+                nexus_tiles = NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                   min_lat, max_lat,
+                                                                   min_lon, mid_lon,
+                                                                   dataset,
+                                                                   start_time, end_time,
+                                                                   part_dim=part_dim)
+                nexus_tiles.extend(NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                        min_lat,
+                                                                        max_lat,
+                                                                        mid_lon,
+                                                                        max_lon,
+                                                                        dataset,
+                                                                        start_time,
+                                                                        end_time,
+                                                                        part_dim=part_dim))
             elif part_dim == 2:
                 # Partition by time.
                 mid_time = (start_time + end_time) / 2
-                nexus_tiles = SparkHandler.query_by_parts(tile_service,
-                                                          min_lat, max_lat,
-                                                          min_lon, max_lon,
-                                                          dataset,
-                                                          start_time, mid_time,
-                                                          part_dim=part_dim)
-                nexus_tiles.extend(SparkHandler.query_by_parts(tile_service,
-                                                               min_lat,
-                                                               max_lat,
-                                                               min_lon,
-                                                               max_lon,
-                                                               dataset,
-                                                               mid_time,
-                                                               end_time,
-                                                               part_dim=part_dim))
+                nexus_tiles = NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                   min_lat, max_lat,
+                                                                   min_lon, max_lon,
+                                                                   dataset,
+                                                                   start_time, mid_time,
+                                                                   part_dim=part_dim)
+                nexus_tiles.extend(NexusCalcSparkHandler.query_by_parts(tile_service,
+                                                                        min_lat,
+                                                                        max_lat,
+                                                                        min_lon,
+                                                                        max_lon,
+                                                                        dataset,
+                                                                        mid_time,
+                                                                        end_time,
+                                                                        part_dim=part_dim))
         else:
             # No exception, so query Cassandra for the tile data.
             # print 'Making NEXUS query to Cassandra for %d tiles...' % \
@@ -614,8 +349,4 @@ class SparkHandler(NexusHandler):
                                          accumulator=self._sc.accumulator(0)),
             NumberMetricsField(key='reduce', description='Actual time to reduce results'),
             NumberMetricsField(key="actual_time", description="Total (actual) time")
-        ])
-
-
-def executeInitializers(config):
-    [wrapper.init(config) for wrapper in AVAILABLE_INITIALIZERS]
+        ])
\ No newline at end of file
diff --git a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
index ca430eb..c668130 100644
--- a/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeAvgMapSpark.py
@@ -21,7 +21,8 @@ import numpy as np
 import shapely.geometry
 from nexustiles.nexustiles import NexusTileService
 from pytz import timezone
-from webservice.NexusHandler import nexus_handler, SparkHandler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -29,7 +30,9 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class TimeAvgMapSparkHandlerImpl(SparkHandler):
+class TimeAvgMapNexusSparkHandlerImpl(NexusCalcSparkHandler):
+    # __singleton_lock = threading.Lock()
+    # __singleton_instance = None
     name = "Time Average Map Spark"
     path = "/timeAvgMapSpark"
     description = "Computes a Latitude/Longitude Time Average plot given an arbitrary geographical area and time range"
@@ -65,9 +68,18 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
     }
     singleton = True
 
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
+    # @classmethod
+    # def instance(cls, algorithm_config=None, sc=None):
+    #     with cls.__singleton_lock:
+    #         if not cls.__singleton_instance:
+    #             try:
+    #                 singleton_instance = cls()
+    #                 singleton_instance.set_config(algorithm_config)
+    #                 singleton_instance.set_spark_context(sc)
+    #                 cls.__singleton_instance = singleton_instance
+    #             except AttributeError:
+    #                 pass
+    #     return cls.__singleton_instance
 
     def parse_arguments(self, request):
         # Parse input arguments
@@ -87,39 +99,12 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
             raise NexusProcessingException(
                 reason="Cannot compute Latitude/Longitude Time Average plot on a climatology", code=400)
 
-        try:
-            bounding_polygon = request.get_bounding_polygon()
-            request.get_min_lon = lambda: bounding_polygon.bounds[0]
-            request.get_min_lat = lambda: bounding_polygon.bounds[1]
-            request.get_max_lon = lambda: bounding_polygon.bounds[2]
-            request.get_max_lat = lambda: bounding_polygon.bounds[3]
-        except:
-            try:
-                west, south, east, north = request.get_min_lon(), request.get_min_lat(), \
-                                           request.get_max_lon(), request.get_max_lat()
-                bounding_polygon = shapely.geometry.Polygon(
-                    [(west, south), (east, south), (east, north), (west, north), (west, south)])
-            except:
-                raise NexusProcessingException(
-                    reason="'b' argument is required. Must be comma-delimited float formatted as "
-                           "Minimum (Western) Longitude, Minimum (Southern) Latitude, "
-                           "Maximum (Eastern) Longitude, Maximum (Northern) Latitude",
-                    code=400)
+        west, south, east, north = request.get_bounding_box()
+        bounding_polygon = shapely.geometry.Polygon(
+            [(west, south), (east, south), (east, north), (west, north), (west, south)])
 
-        try:
-            start_time = request.get_start_datetime()
-        except:
-            raise NexusProcessingException(
-                reason="'startTime' argument is required. Can be int value seconds from epoch or "
-                       "string format YYYY-MM-DDTHH:mm:ssZ",
-                code=400)
-        try:
-            end_time = request.get_end_datetime()
-        except:
-            raise NexusProcessingException(
-                reason="'endTime' argument is required. Can be int value seconds from epoch or "
-                       "string format YYYY-MM-DDTHH:mm:ssZ",
-                code=400)
+        start_time = request.get_start_datetime()
+        end_time = request.get_end_datetime()
 
         if start_time > end_time:
             raise NexusProcessingException(
@@ -259,10 +244,9 @@ class TimeAvgMapSparkHandlerImpl(SparkHandler):
                             tile_min_lon, tile_max_lon,
                             y0, y1, x0, x1))
 
-        reduce_duration += (datetime.now() - reduce_start).total_seconds()
-
-        # Store global map in a NetCDF file.
-        self._create_nc_file(a, 'tam.nc', 'val', fill=self._fill)
+        # Store global map in a NetCDF file for debugging purpose
+        # if activated this line is not thread safe and might cause error when concurrent access occurs
+        # self._create_nc_file(a, 'tam.nc', 'val', fill=self._fill)
 
         # Create dict for JSON response
         results = [[{'mean': a[y, x], 'cnt': int(n[y, x]),
diff --git a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
index 20f989a..bf5963e 100644
--- a/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
+++ b/analysis/webservice/algorithms_spark/TimeSeriesSpark.py
@@ -32,7 +32,8 @@ from nexustiles.nexustiles import NexusTileService
 from pytz import timezone
 from scipy import stats
 from webservice import Filtering as filtering
-from webservice.NexusHandler import nexus_handler, SparkHandler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NoDataException, NexusProcessingException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -42,7 +43,7 @@ logger = logging.getLogger(__name__)
 
 
 @nexus_handler
-class TimeSeriesHandlerImpl(SparkHandler):
+class TimeSeriesSparkHandlerImpl(NexusCalcSparkHandler):
     name = "Time Series Spark"
     path = "/timeSeriesSpark"
     description = "Computes a time series plot between one or more datasets given an arbitrary geographical area and time range"
@@ -90,10 +91,6 @@ class TimeSeriesHandlerImpl(SparkHandler):
     }
     singleton = True
 
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
-
     def parse_arguments(self, request):
         # Parse input arguments
         self.log.debug("Parsing arguments")
@@ -249,7 +246,7 @@ class TimeSeriesHandlerImpl(SparkHandler):
 
         if len(ds) == 2:
             try:
-                stats = TimeSeriesHandlerImpl.calculate_comparison_stats(results)
+                stats = TimeSeriesSparkHandlerImpl.calculate_comparison_stats(results)
             except Exception:
                 stats = {}
                 tb = traceback.format_exc()
diff --git a/analysis/webservice/algorithms_spark/VarianceSpark.py b/analysis/webservice/algorithms_spark/VarianceSpark.py
index 8c96cb7..698385d 100644
--- a/analysis/webservice/algorithms_spark/VarianceSpark.py
+++ b/analysis/webservice/algorithms_spark/VarianceSpark.py
@@ -23,7 +23,8 @@ import shapely.geometry
 from nexustiles.nexustiles import NexusTileService
 from pytz import timezone
 
-from webservice.NexusHandler import nexus_handler, SparkHandler
+from webservice.NexusHandler import nexus_handler
+from webservice.algorithms_spark.NexusCalcSparkHandler import NexusCalcSparkHandler
 from webservice.webmodel import NexusResults, NexusProcessingException, NoDataException
 
 EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
@@ -31,7 +32,7 @@ ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
 
 
 @nexus_handler
-class VarianceSparkHandlerImpl(SparkHandler):
+class VarianceNexusSparkHandlerImpl(NexusCalcSparkHandler):
     name = "Temporal Variance Spark"
     path = "/varianceSpark"
     description = "Computes a map of the temporal variance"
@@ -67,9 +68,6 @@ class VarianceSparkHandlerImpl(SparkHandler):
     }
     singleton = True
 
-    def __init__(self):
-        SparkHandler.__init__(self)
-        self.log = logging.getLogger(__name__)
 
     def parse_arguments(self, request):
         # Parse input arguments
diff --git a/analysis/webservice/algorithms_spark/__init__.py b/analysis/webservice/algorithms_spark/__init__.py
index 5caa8c0..d6ed83f 100644
--- a/analysis/webservice/algorithms_spark/__init__.py
+++ b/analysis/webservice/algorithms_spark/__init__.py
@@ -16,6 +16,17 @@
 
 import logging
 import os
+import ClimMapSpark
+import CorrMapSpark
+import DailyDifferenceAverageSpark
+import HofMoellerSpark
+import Matchup
+import MaximaMinimaSpark
+import NexusCalcSparkHandler
+import TimeAvgMapSpark
+import TimeSeriesSpark
+import VarianceSpark
+
 
 log = logging.getLogger(__name__)
 
diff --git a/analysis/webservice/nexus_tornado/__init__.py b/analysis/webservice/nexus_tornado/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/analysis/webservice/nexus_tornado/request/__init__.py b/analysis/webservice/nexus_tornado/request/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/analysis/webservice/nexus_tornado/request/handlers/NexusRequestHandler.py b/analysis/webservice/nexus_tornado/request/handlers/NexusRequestHandler.py
new file mode 100644
index 0000000..210c1f3
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/handlers/NexusRequestHandler.py
@@ -0,0 +1,63 @@
import json
import logging

import tornado.gen
import tornado.ioloop
# BUG FIX: tornado.web was never imported, so referencing
# tornado.web.RequestHandler below raised NameError at module import time.
import tornado.web

from webservice.nexus_tornado.request.renderers import NexusRendererFactory
from webservice.webmodel import NexusRequestObjectTornadoFree, NexusRequestObject, NexusProcessingException


class NexusRequestHandler(tornado.web.RequestHandler):
    """Generic tornado handler for NEXUS algorithms.

    For each GET request it wraps the tornado request in a nexus request
    object, instantiates a fresh algorithm handler (no singleton, see
    SDAP-252), runs the calculation on a worker thread and renders the
    result in the output format asked for by the client.
    """

    def initialize(self, thread_pool, clazz=None, **kargs):
        """Tornado entry point called once per request.

        thread_pool -- executor the calculation is dispatched on
        clazz       -- algorithm handler class to instantiate per request
        kargs       -- extra constructor arguments for clazz
                       (e.g. 'algorithm_config', 'sc' for spark handlers)
        """
        self.logger = logging.getLogger('nexus')
        self.executor = thread_pool
        self.__clazz = clazz
        self._clazz_init_args = kargs  # 'algorithm_config', 'sc' for spark handler

    @tornado.gen.coroutine
    def get(self):
        self.logger.info("Received request %s" % self._request_summary())

        # temporary hack to use a NexusRequestObject without tornado request references
        # this object only supports timeAvgMapSpark yet.
        # Will be extended to replace the historical object in the next pull request related to ticket SDAP-252
        if self.request.path == '/timeAvgMapSpark':
            request = NexusRequestObjectTornadoFree(self)
        else:
            request = NexusRequestObject(self)

        # create a fresh NexusCalcHandler which will process the request;
        # a new instance per request avoids the shared-state bug of the old singleton pattern
        instance = self.__clazz(**self._clazz_init_args)

        try:
            # process the request asynchronously on a different thread,
            # the current tornado handler is still available to get other user requests
            results = yield tornado.ioloop.IOLoop.current().run_in_executor(self.executor, instance.calc, request)

            try:
                self.set_status(results.status_code)
            except AttributeError:
                # not every result type carries an explicit HTTP status code
                pass

            renderer = NexusRendererFactory.get_renderer(request)
            renderer.render(self, results)

        except NexusProcessingException as e:
            self.async_onerror_callback(e.reason, e.code)

        except Exception as e:
            self.async_onerror_callback(str(e), 500)

    def async_onerror_callback(self, reason, code=500):
        """Log the failure and reply with a JSON error document and HTTP code."""
        self.logger.error("Error processing request", exc_info=True)

        self.set_header("Content-Type", "application/json")
        self.set_status(code)

        response = {
            "error": reason,
            "code": code
        }

        self.write(json.dumps(response, indent=5))
        self.finish()
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/request/handlers/__init__.py b/analysis/webservice/nexus_tornado/request/handlers/__init__.py
new file mode 100644
index 0000000..7c6b1f4
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/handlers/__init__.py
@@ -0,0 +1 @@
+from .NexusRequestHandler import NexusRequestHandler
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusCSVRenderer.py b/analysis/webservice/nexus_tornado/request/renderers/NexusCSVRenderer.py
new file mode 100644
index 0000000..9802dc8
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusCSVRenderer.py
@@ -0,0 +1,17 @@
import sys
import traceback
from webservice.webmodel import NexusProcessingException


class NexusCSVRenderer(object):
    """Render a calculation result as a CSV file download."""

    def __init__(self, nexus_request):
        self._request = nexus_request

    def render(self, tornado_handler, result):
        """Write result.toCSV() through the tornado handler.

        Raises NexusProcessingException if the result cannot be converted.
        """
        tornado_handler.set_header("Content-Type", "text/csv")
        tornado_handler.set_header("Content-Disposition",
                                   "filename=\"%s\"" % self._request.get_argument('filename', "download.csv"))
        try:
            # BUG FIX: was self.write(...) -- the renderer has no write() method,
            # so every CSV request failed; output must go through the tornado
            # handler, and the response must be finished like the JSON renderer does.
            tornado_handler.write(result.toCSV())
            tornado_handler.finish()
        except Exception:
            traceback.print_exc(file=sys.stdout)
            raise NexusProcessingException(reason="Unable to convert results to CSV.")
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusJSONRenderer.py b/analysis/webservice/nexus_tornado/request/renderers/NexusJSONRenderer.py
new file mode 100644
index 0000000..ebcc742
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusJSONRenderer.py
@@ -0,0 +1,19 @@
import sys
import traceback
import json


class NexusJSONRenderer(object):
    """Render a calculation result as a JSON response."""

    def __init__(self, nexus_request):
        # renamed from self.request to self._request for consistency with the
        # sibling renderers (CSV, PNG, NetCDF, ZIP) in this package
        self._request = nexus_request

    def render(self, tornado_handler, result):
        """Write result.toJson() through the tornado handler.

        Falls back to json.dumps(result) when the result object has no
        toJson() method (e.g. a plain dict/list).
        """
        tornado_handler.set_header("Content-Type", "application/json")
        try:
            result_str = result.toJson()
            tornado_handler.write(result_str)
            tornado_handler.finish()
        except AttributeError:
            traceback.print_exc(file=sys.stdout)
            tornado_handler.write(json.dumps(result, indent=4))
            tornado_handler.finish()
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusNETCDFRenderer.py b/analysis/webservice/nexus_tornado/request/renderers/NexusNETCDFRenderer.py
new file mode 100644
index 0000000..9d6ca92
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusNETCDFRenderer.py
@@ -0,0 +1,17 @@
import sys
import traceback
from webservice.webmodel import NexusProcessingException


class NexusNETCDFRenderer(object):
    """Render a calculation result as a NetCDF file download."""

    def __init__(self, nexus_request):
        self._request = nexus_request

    def render(self, tornado_handler, result):
        """Write result.toNetCDF() through the tornado handler.

        Raises NexusProcessingException if the result cannot be converted.
        """
        tornado_handler.set_header("Content-Type", "application/x-netcdf")
        tornado_handler.set_header("Content-Disposition",
                                   "filename=\"%s\"" % self._request.get_argument('filename', "download.nc"))
        try:
            # BUG FIX: was self.write(...) -- the renderer has no write() method,
            # so every NetCDF request failed; output must go through the tornado
            # handler, and the response must be finished.
            tornado_handler.write(result.toNetCDF())
            tornado_handler.finish()
        except Exception:
            traceback.print_exc(file=sys.stdout)
            raise NexusProcessingException(reason="Unable to convert results to NetCDF.")
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusPNGRenderer.py b/analysis/webservice/nexus_tornado/request/renderers/NexusPNGRenderer.py
new file mode 100644
index 0000000..3782bf1
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusPNGRenderer.py
@@ -0,0 +1,17 @@
import sys
import traceback
from webservice.webmodel import NexusProcessingException


class NexusPNGRenderer(object):
    """Render a calculation result as a PNG image response."""

    def __init__(self, nexus_request):
        self._request = nexus_request

    def render(self, tornado_handler, result):
        """Write result.toImage() through the tornado handler.

        Raises NexusProcessingException when the result object cannot
        produce an image (no toImage() method).
        """
        tornado_handler.set_header("Content-Type", "image/png")
        try:
            image_bytes = result.toImage()
            tornado_handler.write(image_bytes)
            tornado_handler.finish()
        except AttributeError:
            traceback.print_exc(file=sys.stdout)
            raise NexusProcessingException(reason="Unable to convert results to an Image.")
\ No newline at end of file
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusRendererFactory.py b/analysis/webservice/nexus_tornado/request/renderers/NexusRendererFactory.py
new file mode 100644
index 0000000..9fc06e3
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusRendererFactory.py
@@ -0,0 +1,16 @@
class NexusRendererFactory(object):
    """Create the renderer matching the output format requested by the client."""

    # Supported values of the request 'output' parameter.
    # NOTE(review): "XML" is listed but no NexusXMLRenderer class exists in
    # this package, so requesting XML output raises AttributeError -- confirm
    # before advertising XML support.
    content_types = ["CSV", "JSON", "XML", "PNG", "NETCDF", "ZIP"]

    @classmethod
    def get_renderer(cls, request):
        """Return a Nexus<TYPE>Renderer instance for the request's content
        type, or None when the type is not in content_types.
        """
        content_type = request.get_content_type()
        if content_type in cls.content_types:
            import importlib
            # BUG FIX: __import__(__name__) returns the *top-level* package,
            # and the former cls.module.nexus_tornado.request.renderers chain
            # only resolved when imported via one exact dotted path.
            # importlib.import_module on this class's own module is robust.
            renderers_module = importlib.import_module(cls.__module__)
            renderer_class = getattr(renderers_module, 'Nexus' + content_type + 'Renderer')
            return renderer_class(request)
diff --git a/analysis/webservice/nexus_tornado/request/renderers/NexusZIPRenderer.py b/analysis/webservice/nexus_tornado/request/renderers/NexusZIPRenderer.py
new file mode 100644
index 0000000..bfa6b69
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/NexusZIPRenderer.py
@@ -0,0 +1,17 @@
import sys
import traceback
from webservice.webmodel import NexusProcessingException


class NexusZIPRenderer(object):
    """Render a calculation result as a ZIP archive download."""

    def __init__(self, nexus_request):
        self._request = nexus_request

    def render(self, tornado_handler, result):
        """Write result.toZip() through the tornado handler.

        Raises NexusProcessingException if the result cannot be converted.
        """
        tornado_handler.set_header("Content-Type", "application/zip")
        tornado_handler.set_header("Content-Disposition",
                                   "filename=\"%s\"" % self._request.get_argument('filename', "download.zip"))
        try:
            # BUG FIX: was self.write(...) -- the renderer has no write() method,
            # so every ZIP request failed; output must go through the tornado
            # handler, and the response must be finished.
            tornado_handler.write(result.toZip())
            tornado_handler.finish()
        except Exception:
            traceback.print_exc(file=sys.stdout)
            raise NexusProcessingException(reason="Unable to convert results to Zip.")
diff --git a/analysis/webservice/nexus_tornado/request/renderers/__init__.py b/analysis/webservice/nexus_tornado/request/renderers/__init__.py
new file mode 100644
index 0000000..807eb7a
--- /dev/null
+++ b/analysis/webservice/nexus_tornado/request/renderers/__init__.py
@@ -0,0 +1,6 @@
+from .NexusRendererFactory import NexusRendererFactory
+from .NexusJSONRenderer import NexusJSONRenderer
+from .NexusCSVRenderer import NexusCSVRenderer
+from .NexusNETCDFRenderer import NexusNETCDFRenderer
+from .NexusPNGRenderer import NexusPNGRenderer
+from .NexusZIPRenderer import NexusZIPRenderer
\ No newline at end of file
diff --git a/analysis/webservice/webapp.py b/analysis/webservice/webapp.py
index 55792ce..adfedda 100644
--- a/analysis/webservice/webapp.py
+++ b/analysis/webservice/webapp.py
@@ -13,147 +13,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
+import os
 import ConfigParser
 import importlib
-import json
 import logging
 import sys
-import traceback
-
-import matplotlib
 import pkg_resources
 import tornado.web
+import webservice.algorithms_spark.NexusCalcSparkHandler
 from tornado.options import define, options, parse_command_line
-from webservice import NexusHandler
-from webservice.webmodel import NexusRequestObject, NexusProcessingException
-
-matplotlib.use('Agg')
-
-
-class ContentTypes(object):
-    CSV = "CSV"
-    JSON = "JSON"
-    XML = "XML"
-    PNG = "PNG"
-    NETCDF = "NETCDF"
-    ZIP = "ZIP"
-
-
-class BaseHandler(tornado.web.RequestHandler):
-    path = r"/"
-
-    def initialize(self, thread_pool):
-        self.logger = logging.getLogger('nexus')
-        self.executor = thread_pool
-
-    @tornado.gen.coroutine
-    def get(self):
-        self.logger.info("Received request %s" % self._request_summary())
-        yield self.run()
-
-    @tornado.concurrent.run_on_executor
-    def run(self):
-        reqObject = NexusRequestObject(self)
-        try:
-            result = self.do_get(reqObject)
-            self.async_callback(result)
-        except NexusProcessingException as e:
-            self.async_onerror_callback(e.reason, e.code)
-        except Exception as e:
-            self.async_onerror_callback(str(e), 500)
-
-    def async_onerror_callback(self, reason, code=500):
-        self.logger.error("Error processing request", exc_info=True)
-
-        self.set_header("Content-Type", "application/json")
-        self.set_status(code)
-
-        response = {
-            "error": reason,
-            "code": code
-        }
 
-        self.write(json.dumps(response, indent=5))
-        self.finish()
-
-    def async_callback(self, result):
-        pass
-
-    ''' Override me for standard handlers! '''
-
-    def do_get(self, reqObject):
-
-        for root, dirs, files in os.walk("."):
-            for pyfile in [afile for afile in files if afile.endswith(".py")]:
-                print(os.path.join(root, pyfile))
-                with open(os.path.join(root, pyfile), 'r') as original: data = original.read()
-                with open(os.path.join(root, pyfile), 'w') as modified: modified.write(license + "\n" + data)
-        pass
-
-
-class ModularNexusHandlerWrapper(BaseHandler):
-    def initialize(self, thread_pool, clazz=None, algorithm_config=None, sc=None):
-        BaseHandler.initialize(self, thread_pool)
-        self.__algorithm_config = algorithm_config
-        self.__clazz = clazz
-        self.__sc = sc
-
-    def do_get(self, request):
-        instance = self.__clazz.instance(algorithm_config=self.__algorithm_config, sc=self.__sc)
-
-        results = instance.calc(request)
-
-        try:
-            self.set_status(results.status_code)
-        except AttributeError:
-            pass
-
-        if request.get_content_type() == ContentTypes.JSON:
-            self.set_header("Content-Type", "application/json")
-            try:
-                self.write(results.toJson())
-            except AttributeError:
-                traceback.print_exc(file=sys.stdout)
-                self.write(json.dumps(results, indent=4))
-        elif request.get_content_type() == ContentTypes.PNG:
-            self.set_header("Content-Type", "image/png")
-            try:
-                self.write(results.toImage())
-            except AttributeError:
-                traceback.print_exc(file=sys.stdout)
-                raise NexusProcessingException(reason="Unable to convert results to an Image.")
-        elif request.get_content_type() == ContentTypes.CSV:
-            self.set_header("Content-Type", "text/csv")
-            self.set_header("Content-Disposition", "filename=\"%s\"" % request.get_argument('filename', "download.csv"))
-            try:
-                self.write(results.toCSV())
-            except:
-                traceback.print_exc(file=sys.stdout)
-                raise NexusProcessingException(reason="Unable to convert results to CSV.")
-        elif request.get_content_type() == ContentTypes.NETCDF:
-            self.set_header("Content-Type", "application/x-netcdf")
-            self.set_header("Content-Disposition", "filename=\"%s\"" % request.get_argument('filename', "download.nc"))
-            try:
-                self.write(results.toNetCDF())
-            except:
-                traceback.print_exc(file=sys.stdout)
-                raise NexusProcessingException(reason="Unable to convert results to NetCDF.")
-        elif request.get_content_type() == ContentTypes.ZIP:
-            self.set_header("Content-Type", "application/zip")
-            self.set_header("Content-Disposition", "filename=\"%s\"" % request.get_argument('filename', "download.zip"))
-            try:
-                self.write(results.toZip())
-            except:
-                traceback.print_exc(file=sys.stdout)
-                raise NexusProcessingException(reason="Unable to convert results to Zip.")
-
-        return results
-
-    def async_callback(self, result):
-        super(ModularNexusHandlerWrapper, self).async_callback(result)
-        if hasattr(result, 'cleanup'):
-            result.cleanup()
+from webservice import NexusHandler
+from webservice.nexus_tornado.request.handlers import NexusRequestHandler
 
 def inject_args_in_config(args, config):
     """
@@ -227,22 +98,20 @@ if __name__ == "__main__":
 
     spark_context = None
     for clazzWrapper in NexusHandler.AVAILABLE_HANDLERS:
-        if issubclass(clazzWrapper.clazz(), NexusHandler.SparkHandler):
+        if issubclass(clazzWrapper, webservice.algorithms_spark.NexusCalcSparkHandler.NexusCalcSparkHandler):
             if spark_context is None:
-                from pyspark import SparkConf
                 from pyspark.sql import SparkSession
-
                 spark = SparkSession.builder.appName("nexus-analysis").getOrCreate()
                 spark_context = spark.sparkContext
 
             handlers.append(
-                (clazzWrapper.path(), ModularNexusHandlerWrapper,
+                (clazzWrapper.path, NexusRequestHandler,
                  dict(clazz=clazzWrapper, algorithm_config=algorithm_config, sc=spark_context,
                       thread_pool=request_thread_pool)))
         else:
             handlers.append(
-                (clazzWrapper.path(), ModularNexusHandlerWrapper,
-                 dict(clazz=clazzWrapper, algorithm_config=algorithm_config, thread_pool=request_thread_pool)))
+                (clazzWrapper.path, NexusRequestHandler,
+                 dict(clazz=clazzWrapper, thread_pool=request_thread_pool)))
 
 
     class VersionHandler(tornado.web.RequestHandler):
diff --git a/analysis/webservice/webapp_livy.py b/analysis/webservice/webapp_livy.py
index ed8a638..029d812 100644
--- a/analysis/webservice/webapp_livy.py
+++ b/analysis/webservice/webapp_livy.py
@@ -19,11 +19,11 @@ import logging
 import sys
 import os
 import pkg_resources
-import tornado.web
-from tornado.options import define, options, parse_command_line
+import nexus_tornado.web
+from nexus_tornado.options import define, options, parse_command_line
 from webservice.NexusLivyHandler import LivyHandler
 
-class RunFileHandler(tornado.web.RequestHandler):
+class RunFileHandler(nexus_tornado.web.RequestHandler):
 
     _id = 0
             
@@ -47,7 +47,7 @@ class RunFileHandler(tornado.web.RequestHandler):
         self.write(str(ans))
 
 
-class RunStrHandler(tornado.web.RequestHandler):
+class RunStrHandler(nexus_tornado.web.RequestHandler):
             
     def __init__(self, *args, **kwargs):
         self._lh = kwargs.pop('lh', None)
@@ -91,11 +91,11 @@ if __name__ == "__main__":
     handlers.append((r"/run_str", RunStrHandler, dict(lh=lh)))
 
     # Start listening for job requests.
-    app = tornado.web.Application(
+    app = nexus_tornado.web.Application(
         handlers,
         default_host=options.address,
         debug=options.debug
     )
     app.listen(options.port)
     log.info("Started HTTP listener...")
-    tornado.ioloop.IOLoop.current().start()
+    nexus_tornado.ioloop.IOLoop.current().start()
diff --git a/analysis/webservice/webmodel.py b/analysis/webservice/webmodel.py
deleted file mode 100644
index 0f98c30..0000000
--- a/analysis/webservice/webmodel.py
+++ /dev/null
@@ -1,520 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import hashlib
-import inspect
-import json
-import re
-import time
-from datetime import datetime
-from decimal import Decimal
-
-import numpy as np
-from pytz import UTC, timezone
-from shapely.geometry import Polygon
-
-EPOCH = timezone('UTC').localize(datetime(1970, 1, 1))
-ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
-
-
-class RequestParameters(object):
-    SEASONAL_CYCLE_FILTER = "seasonalFilter"
-    MAX_LAT = "maxLat"
-    MIN_LAT = "minLat"
-    MAX_LON = "maxLon"
-    MIN_LON = "minLon"
-    DATASET = "ds"
-    ENVIRONMENT = "env"
-    OUTPUT = "output"
-    START_TIME = "startTime"
-    END_TIME = "endTime"
-    START_YEAR = "startYear"
-    END_YEAR = "endYear"
-    CLIM_MONTH = "month"
-    START_ROW = "start"
-    ROW_COUNT = "numRows"
-    APPLY_LOW_PASS = "lowPassFilter"
-    LOW_CUT = "lowCut"
-    ORDER = "lpOrder"
-    PLOT_SERIES = "plotSeries"
-    PLOT_TYPE = "plotType"
-    NPARTS = "nparts"
-    METADATA_FILTER = "metadataFilter"
-
-
-class StandardNexusErrors:
-    UNKNOWN = 1000
-    NO_DATA = 1001
-    DATASET_MISSING = 1002
-
-
-class NexusProcessingException(Exception):
-    def __init__(self, error=StandardNexusErrors.UNKNOWN, reason="", code=500):
-        self.error = error
-        self.reason = reason
-        self.code = code
-        Exception.__init__(self, reason)
-
-
-class NoDataException(NexusProcessingException):
-    def __init__(self, reason="No data found for the selected timeframe"):
-        NexusProcessingException.__init__(self, StandardNexusErrors.NO_DATA, reason, 400)
-
-
-class DatasetNotFoundException(NexusProcessingException):
-    def __init__(self, reason="Dataset not found"):
-        NexusProcessingException.__init__(self, StandardNexusErrors.DATASET_MISSING, reason, code=404)
-
-
-class StatsComputeOptions(object):
-    def __init__(self):
-        pass
-
-    def get_apply_seasonal_cycle_filter(self, default="false"):
-        raise Exception("Please implement")
-
-    def get_max_lat(self, default=90.0):
-        raise Exception("Please implement")
-
-    def get_min_lat(self, default=-90.0):
-        raise Exception("Please implement")
-
-    def get_max_lon(self, default=180):
-        raise Exception("Please implement")
-
-    def get_min_lon(self, default=-180):
-        raise Exception("Please implement")
-
-    def get_dataset(self):
-        raise Exception("Please implement")
-
-    def get_environment(self):
-        raise Exception("Please implement")
-
-    def get_start_time(self):
-        raise Exception("Please implement")
-
-    def get_end_time(self):
-        raise Exception("Please implement")
-
-    def get_start_year(self):
-        raise Exception("Please implement")
-
-    def get_end_year(self):
-        raise Exception("Please implement")
-
-    def get_clim_month(self):
-        raise Exception("Please implement")
-
-    def get_start_row(self):
-        raise Exception("Please implement")
-
-    def get_end_row(self):
-        raise Exception("Please implement")
-
-    def get_content_type(self):
-        raise Exception("Please implement")
-
-    def get_apply_low_pass_filter(self, default=False):
-        raise Exception("Please implement")
-
-    def get_low_pass_low_cut(self, default=12):
-        raise Exception("Please implement")
-
-    def get_low_pass_order(self, default=9):
-        raise Exception("Please implement")
-
-    def get_plot_series(self, default="mean"):
-        raise Exception("Please implement")
-
-    def get_plot_type(self, default="default"):
-        raise Exception("Please implement")
-
-    def get_nparts(self):
-        raise Exception("Please implement")
-
-
-class NexusRequestObject(StatsComputeOptions):
-    shortNamePattern = re.compile("^[a-zA-Z0-9_\-,\.]+$")
-    floatingPointPattern = re.compile('[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?')
-
-    def __init__(self, reqHandler):
-        if reqHandler is None:
-            raise Exception("Request handler cannot be null")
-        self.requestHandler = reqHandler
-        StatsComputeOptions.__init__(self)
-
-    def get_argument(self, name, default=None):
-        return self.requestHandler.get_argument(name, default=default)
-
-    def get_list_int_arg(self, name, default=None):
-        arg = self.get_argument(name, default=default)
-        return arg.split(',')
-
-    def __validate_is_shortname(self, v):
-        if v is None or len(v) == 0:
-            return False
-        return self.shortNamePattern.match(v) is not None
-
-    def __validate_is_number(self, v):
-        if v is None or (type(v) == str and len(v) == 0):
-            return False
-        elif type(v) == int or type(v) == float:
-            return True
-        else:
-            return self.floatingPointPattern.match(v) is not None
-
-    def get_float_arg(self, name, default=0.0):
-        arg = self.get_argument(name, default)
-        if self.__validate_is_number(arg):
-            return float(arg)
-        else:
-            return default
-
-    def get_decimal_arg(self, name, default=0.0):
-        arg = self.get_argument(name, default)
-        if self.__validate_is_number(arg):
-            return Decimal(arg)
-        else:
-            if default is None:
-                return None
-            return Decimal(default)
-
-    def get_int_arg(self, name, default=0):
-        arg = self.get_argument(name, default)
-        if self.__validate_is_number(arg):
-            return int(arg)
-        else:
-            return default
-
-    def get_boolean_arg(self, name, default=False):
-        arg = self.get_argument(name, "false" if not default else "true")
-        return arg is not None and arg in ['true', '1', 't', 'y', 'yes', 'True', 'T', 'Y',
-                                           'Yes', True]
-
-    def get_datetime_arg(self, name, default=None):
-        time_str = self.get_argument(name, default=default)
-        if time_str == default:
-            return default
-        try:
-            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
-        except ValueError:
-            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
-        return dt
-
-    def get_apply_seasonal_cycle_filter(self, default=True):
-        return self.get_boolean_arg(RequestParameters.SEASONAL_CYCLE_FILTER, default=default)
-
-    def get_max_lat(self, default=Decimal(90)):
-        return self.get_decimal_arg("maxLat", default)
-
-    def get_min_lat(self, default=Decimal(-90)):
-        return self.get_decimal_arg("minLat", default)
-
-    def get_max_lon(self, default=Decimal(180)):
-        return self.get_decimal_arg("maxLon", default)
-
-    def get_min_lon(self, default=Decimal(-180)):
-        return self.get_decimal_arg("minLon", default)
-
-    def get_bounding_polygon(self):
-        west, south, east, north = [float(b) for b in self.get_argument("b").split(",")]
-        polygon = Polygon([(west, south), (east, south), (east, north), (west, north), (west, south)])
-        return polygon
-
-    def get_dataset(self):
-        ds = self.get_argument(RequestParameters.DATASET, None)
-        if ds is not None and not self.__validate_is_shortname(ds):
-            raise Exception("Invalid shortname")
-        else:
-            return ds.split(",")
-
-    def get_metadata_filter(self):
-        return self.requestHandler.get_arguments(RequestParameters.METADATA_FILTER)
-
-    def get_environment(self):
-        env = self.get_argument(RequestParameters.ENVIRONMENT, None)
-        if env is None and "Origin" in self.requestHandler.request.headers:
-            origin = self.requestHandler.request.headers["Origin"]
-            if origin == "http://localhost:63342":
-                env = "DEV"
-            if origin == "https://sealevel.uat.earthdata.nasa.gov":
-                env = "UAT"
-            elif origin == "https://sealevel.sit.earthdata.nasa.gov":
-                env = "SIT"
-            elif origin == "https://sealevel.earthdata.nasa.gov":
-                env = "PROD"
-
-        if env not in ("DEV", "SIT", "UAT", "PROD", None):
-            raise Exception("Invalid Environment")
-        else:
-            return env
-
-    def get_start_time(self):
-        return self.get_int_arg(RequestParameters.START_TIME, 0)
-
-    def get_end_time(self):
-        return self.get_int_arg(RequestParameters.END_TIME, -1)
-
-    def get_start_year(self):
-        return self.get_int_arg(RequestParameters.START_YEAR, 0)
-
-    def get_end_year(self):
-        return self.get_int_arg(RequestParameters.END_YEAR, -1)
-
-    def get_clim_month(self):
-        return self.get_int_arg(RequestParameters.CLIM_MONTH, -1)
-
-    def get_start_datetime(self):
-        time_str = self.get_argument(RequestParameters.START_TIME)
-        try:
-            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
-        except ValueError:
-            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
-        return dt
-
-    def get_end_datetime(self):
-        time_str = self.get_argument(RequestParameters.END_TIME)
-        try:
-            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
-        except ValueError:
-            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
-        return dt
-
-    def get_start_datetime_ms(self):
-        time_str = self.get_argument(RequestParameters.START_TIME)
-        try:
-            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
-        except ValueError:
-            dt = datetime.utcfromtimestamp(int(time_str) / 1000).replace(tzinfo=UTC)
-        return dt
-
-    def get_end_datetime_ms(self):
-        time_str = self.get_argument(RequestParameters.END_TIME)
-        try:
-            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
-        except ValueError:
-            dt = datetime.utcfromtimestamp(int(time_str) / 1000).replace(tzinfo=UTC)
-        return dt
-
-    def get_start_row(self):
-        return self.get_int_arg(RequestParameters.START_ROW, 0)
-
-    def get_row_count(self):
-        return self.get_int_arg(RequestParameters.ROW_COUNT, 10)
-
-    def get_content_type(self):
-        return self.get_argument(RequestParameters.OUTPUT, "JSON")
-
-    def get_apply_low_pass_filter(self, default=True):
-        return self.get_boolean_arg(RequestParameters.APPLY_LOW_PASS, default)
-
-    def get_low_pass_low_cut(self, default=12):
-        return self.get_float_arg(RequestParameters.LOW_CUT, default)
-
-    def get_low_pass_order(self, default=9):
-        return self.get_float_arg(RequestParameters.ORDER, default)
-
-    def get_include_meta(self):
-        return self.get_boolean_arg("includemeta", True)
-
-    def get_plot_series(self, default="mean"):
-        return self.get_argument(RequestParameters.PLOT_SERIES, default=default)
-
-    def get_plot_type(self, default="default"):
-        return self.get_argument(RequestParameters.PLOT_TYPE, default=default)
-
-    def get_nparts(self):
-        return self.get_int_arg(RequestParameters.NPARTS, 0)
-
-
-class NexusResults:
-    def __init__(self, results=None, meta=None, stats=None, computeOptions=None, status_code=200, **args):
-        self.status_code = status_code
-        self.__results = results
-        self.__meta = meta if meta is not None else {}
-        self.__stats = stats if stats is not None else {}
-        self.__computeOptions = computeOptions
-        if computeOptions is not None:
-            self.__minLat = computeOptions.get_min_lat()
-            self.__maxLat = computeOptions.get_max_lat()
-            self.__minLon = computeOptions.get_min_lon()
-            self.__maxLon = computeOptions.get_max_lon()
-            self.__ds = computeOptions.get_dataset()
-            self.__startTime = computeOptions.get_start_time()
-            self.__endTime = computeOptions.get_end_time()
-        else:
-            self.__minLat = args["minLat"] if "minLat" in args else -90.0
-            self.__maxLat = args["maxLat"] if "maxLat" in args else 90.0
-            self.__minLon = args["minLon"] if "minLon" in args else -180.0
-            self.__maxLon = args["maxLon"] if "maxLon" in args else 180.0
-            self.__ds = args["ds"] if "ds" in args else None
-            self.__startTime = args["startTime"] if "startTime" in args else None
-            self.__endTime = args["endTime"] if "endTime" in args else None
-
-        self.extendMeta(minLat=self.__minLat,
-                        maxLat=self.__maxLat,
-                        minLon=self.__minLon,
-                        maxLon=self.__maxLon,
-                        ds=self.__ds,
-                        startTime=self.__startTime,
-                        endTime=self.__endTime)
-
-    def computeOptions(self):
-        return self.__computeOptions
-
-    def results(self):
-        return self.__results
-
-    def meta(self):
-        return self.__meta
-
-    def stats(self):
-        return self.__stats
-
-    def _extendMeta(self, meta, minLat, maxLat, minLon, maxLon, ds, startTime, endTime):
-        if meta is None:
-            return None
-
-        meta["shortName"] = ds
-        if "title" in meta and "units" in meta:
-            meta["label"] = "%s (%s)" % (meta["title"], meta["units"])
-        if all(p is not None for p in [minLat, maxLat, minLon, maxLon]):
-            meta["bounds"] = {
-                "east": maxLon,
-                "west": minLon,
-                "north": maxLat,
-                "south": minLat
-            }
-        if startTime is not None and endTime is not None:
-            meta["time"] = {
-                "start": startTime,
-                "stop": endTime,
-                "iso_start": datetime.utcfromtimestamp(int(startTime)).replace(tzinfo=timezone('UTC')).strftime(ISO_8601),
-                "iso_stop": datetime.utcfromtimestamp(int(endTime)).replace(tzinfo=timezone('UTC')).strftime(ISO_8601)
-            }
-        return meta
-
-    def extendMeta(self, minLat, maxLat, minLon, maxLon, ds, startTime, endTime):
-        if self.__meta is None:
-            return None
-        if type(ds) == list:
-            for i in range(0, len(ds)):
-                shortName = ds[i]
-
-                if type(self.__meta) == list:
-                    subMeta = self.__meta[i]
-                else:
-                    subMeta = self.__meta  # Risky
-                self._extendMeta(subMeta, minLat, maxLat, minLon, maxLon, shortName, startTime, endTime)
-        else:
-            if type(self.__meta) == list:
-                self.__meta = self.__meta[0]
-            else:
-                self.__meta = self.__meta  # Risky
-            self._extendMeta(self.__meta, minLat, maxLat, minLon, maxLon, ds, startTime, endTime)
-
-    def toJson(self):
-        data = {
-            'meta': self.__meta,
-            'data': self.__results,
-            'stats': self.__stats
-        }
-        return json.dumps(data, indent=4, cls=CustomEncoder)
-
-    def toImage(self):
-        raise Exception("Not implemented for this result type")
-
-
-class CustomEncoder(json.JSONEncoder):
-    def default(self, obj):
-        """If input object is an ndarray it will be converted into a dict
-        holding dtype, shape and the data, base64 encoded.
-        """
-        numpy_types = (
-            np.bool_,
-            # np.bytes_, -- python `bytes` class is not json serializable
-            # np.complex64,  -- python `complex` class is not json serializable
-            # np.complex128,  -- python `complex` class is not json serializable
-            # np.complex256,  -- python `complex` class is not json serializable
-            # np.datetime64,  -- python `datetime.datetime` class is not json serializable
-            np.float16,
-            np.float32,
-            np.float64,
-            # np.float128,  -- special handling below
-            np.int8,
-            np.int16,
-            np.int32,
-            np.int64,
-            # np.object_  -- should already be evaluated as python native
-            np.str_,
-            np.uint8,
-            np.uint16,
-            np.uint32,
-            np.uint64,
-            np.void,
-        )
-        if isinstance(obj, np.ndarray):
-            return obj.tolist()
-        elif isinstance(obj, numpy_types):
-            return obj.item()
-        elif isinstance(obj, np.float128):
-            return obj.astype(np.float64).item()
-        elif isinstance(obj, Decimal):
-            return str(obj)
-        elif isinstance(obj, datetime):
-            return str(obj)
-        elif obj is np.ma.masked:
-            return str(np.NaN)
-        # Let the base class default method raise the TypeError
-        return json.JSONEncoder.default(self, obj)
-
-
-__CACHE = {}
-
-
-def cached(ttl=60000):
-    def _hash_function_signature(func):
-        hash_object = hashlib.md5(str(inspect.getargspec(func)) + str(func))
-        return hash_object.hexdigest()
-
-    def _now():
-        return int(round(time.time() * 1000))
-
-    def _expired(t):
-        if t is None or _now() - t > ttl:
-            return True
-        else:
-            return False
-
-    def _cached_decorator(func):
-
-        def func_wrapper(self, computeOptions, **args):
-            hash = _hash_function_signature(func)
-            force = computeOptions.get_boolean_arg("nocached", default=False)
-
-            if force or hash not in __CACHE or (hash in __CACHE and _expired(__CACHE[hash]["time"])):
-                result = func(self, computeOptions, **args)
-                __CACHE[hash] = {
-                    "time": _now(),
-                    "result": result
-                }
-
-            return __CACHE[hash]["result"]
-
-        return func_wrapper
-
-    return _cached_decorator
diff --git a/analysis/webservice/webmodel/CustomEncoder.py b/analysis/webservice/webmodel/CustomEncoder.py
new file mode 100644
index 0000000..a4f44c3
--- /dev/null
+++ b/analysis/webservice/webmodel/CustomEncoder.py
@@ -0,0 +1,49 @@
+import json
+from datetime import datetime
+from decimal import Decimal
+
+import numpy as np
+
+
+class CustomEncoder(json.JSONEncoder):
+    def default(self, obj):
+        """If input object is an ndarray it will be converted into a dict
+        holding dtype, shape and the data, base64 encoded.
+        """
+        numpy_types = (
+            np.bool_,
+            # np.bytes_, -- python `bytes` class is not json serializable
+            # np.complex64,  -- python `complex` class is not json serializable
+            # np.complex128,  -- python `complex` class is not json serializable
+            # np.complex256,  -- python `complex` class is not json serializable
+            # np.datetime64,  -- python `datetime.datetime` class is not json serializable
+            np.float16,
+            np.float32,
+            np.float64,
+            # np.float128,  -- special handling below
+            np.int8,
+            np.int16,
+            np.int32,
+            np.int64,
+            # np.object_  -- should already be evaluated as python native
+            np.str_,
+            np.uint8,
+            np.uint16,
+            np.uint32,
+            np.uint64,
+            np.void,
+        )
+        if isinstance(obj, np.ndarray):
+            return obj.tolist()
+        elif isinstance(obj, numpy_types):
+            return obj.item()
+        elif isinstance(obj, np.float128):
+            return obj.astype(np.float64).item()
+        elif isinstance(obj, Decimal):
+            return str(obj)
+        elif isinstance(obj, datetime):
+            return str(obj)
+        elif obj is np.ma.masked:
+            return str(np.NaN)
+        # Let the base class default method raise the TypeError
+        return json.JSONEncoder.default(self, obj)
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/Exceptions.py b/analysis/webservice/webmodel/Exceptions.py
new file mode 100644
index 0000000..c07174e
--- /dev/null
+++ b/analysis/webservice/webmodel/Exceptions.py
@@ -0,0 +1,19 @@
+from webservice.webmodel.StandardNexusErrors import StandardNexusErrors
+
+
+class NexusProcessingException(Exception):
+    def __init__(self, error=StandardNexusErrors.UNKNOWN, reason="", code=500):
+        self.error = error
+        self.reason = reason
+        self.code = code
+        Exception.__init__(self, reason)
+
+
+class NoDataException(NexusProcessingException):
+    def __init__(self, reason="No data found for the selected timeframe"):
+        NexusProcessingException.__init__(self, StandardNexusErrors.NO_DATA, reason, 400)
+
+
+class DatasetNotFoundException(NexusProcessingException):
+    def __init__(self, reason="Dataset not found"):
+        NexusProcessingException.__init__(self, StandardNexusErrors.DATASET_MISSING, reason, code=404)
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/NexusRequestObject.py b/analysis/webservice/webmodel/NexusRequestObject.py
new file mode 100644
index 0000000..f118484
--- /dev/null
+++ b/analysis/webservice/webmodel/NexusRequestObject.py
@@ -0,0 +1,227 @@
+import logging
+import re
+from datetime import datetime
+from decimal import Decimal
+
+from pytz import UTC
+from shapely.geometry import Polygon
+from webservice.webmodel.RequestParameters import RequestParameters
+from webservice.webmodel.StatsComputeOptions import StatsComputeOptions
+
+
+class NexusRequestObject(StatsComputeOptions):
+    shortNamePattern = re.compile("^[a-zA-Z0-9_\-,\.]+$")
+    floatingPointPattern = re.compile('[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?')
+
+    def __init__(self, reqHandler):
+        self.__log = logging.getLogger(__name__)
+        if reqHandler is None:
+            raise Exception("Request handler cannot be null")
+        self.requestHandler = reqHandler
+        StatsComputeOptions.__init__(self)
+
+    def get_argument(self, name, default=None):
+        return self.requestHandler.get_argument(name, default=default)
+
+    def get_list_int_arg(self, name, default=None):
+        arg = self.get_argument(name, default=default)
+        return arg.split(',')
+
+    def __validate_is_shortname(self, v):
+        if v is None or len(v) == 0:
+            return False
+        return self.shortNamePattern.match(v) is not None
+
+    def __validate_is_number(self, v):
+        if v is None or (type(v) == str and len(v) == 0):
+            return False
+        elif type(v) == int or type(v) == float:
+            return True
+        else:
+            return self.floatingPointPattern.match(v) is not None
+
+    def get_float_arg(self, name, default=0.0):
+        arg = self.get_argument(name, default)
+        if self.__validate_is_number(arg):
+            return float(arg)
+        else:
+            return default
+
+    def get_decimal_arg(self, name, default=0.0):
+        arg = self.get_argument(name, default)
+        if self.__validate_is_number(arg):
+            return Decimal(arg)
+        else:
+            if default is None:
+                return None
+            return Decimal(default)
+
+    def get_int_arg(self, name, default=0):
+        arg = self.get_argument(name, default)
+        if self.__validate_is_number(arg):
+            return int(arg)
+        else:
+            return default
+
+    def get_boolean_arg(self, name, default=False):
+        arg = self.get_argument(name, "false" if not default else "true")
+        return arg is not None and arg in ['true', '1', 't', 'y', 'yes', 'True', 'T', 'Y',
+                                           'Yes', True]
+
+    def get_datetime_arg(self, name, default=None):
+        time_str = self.get_argument(name, default=default)
+        if time_str == default:
+            return default
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
+        return dt
+
+    def get_apply_seasonal_cycle_filter(self, default=True):
+        return self.get_boolean_arg(RequestParameters.SEASONAL_CYCLE_FILTER, default=default)
+
+    def get_max_lat(self, default=Decimal(90)):
+        return self.get_decimal_arg("maxLat", default)
+
+    def get_min_lat(self, default=Decimal(-90)):
+        return self.get_decimal_arg("minLat", default)
+
+    def get_max_lon(self, default=Decimal(180)):
+        return self.get_decimal_arg("maxLon", default)
+
+    def get_min_lon(self, default=Decimal(-180)):
+        return self.get_decimal_arg("minLon", default)
+
+    # added to fit the simplified version of TimeAvgMapSpark parse_argument
+    def get_bounding_box(self):
+
+        b = self.get_argument("b", '')
+        if b:
+            min_lon, min_lat, max_lon, max_lat = [float(e) for e in b.split(",")]
+        else:
+            max_lat = self.get_argument("maxLat", 90)
+            max_lat = Decimal(max_lat) if self.__validate_is_number(max_lat) else 90
+
+            min_lat = self.get_argument("minLat", -90)
+            min_lat = Decimal(min_lat) if self.__validate_is_number(min_lat) else -90
+
+            max_lon = self.get_argument("maxLon", 180)
+            max_lon = Decimal(max_lon) if self.__validate_is_number(max_lon) else 180
+
+            min_lon = self.get_argument("minLon", -180)
+            min_lon = Decimal(min_lon) if self.__validate_is_number(min_lon) else -180
+
+        return min_lon, min_lat, max_lon, max_lat
+
+
+    def get_bounding_polygon(self):
+        west, south, east, north = [float(b) for b in self.get_argument("b").split(",")]
+        polygon = Polygon([(west, south), (east, south), (east, north), (west, north), (west, south)])
+        return polygon
+
+    def get_dataset(self):
+        ds = self.get_argument(RequestParameters.DATASET, None)
+        if ds is not None and not self.__validate_is_shortname(ds):
+            raise Exception("Invalid shortname")
+        else:
+            return ds.split(",")
+
+    def get_metadata_filter(self):
+        return self.requestHandler.get_arguments(RequestParameters.METADATA_FILTER)
+
+    def get_environment(self):
+        env = self.get_argument(RequestParameters.ENVIRONMENT, None)
+        if env is None and "Origin" in self.requestHandler.request.headers:
+            origin = self.requestHandler.request.headers["Origin"]
+            if origin == "http://localhost:63342":
+                env = "DEV"
+            elif origin == "https://sealevel.uat.earthdata.nasa.gov":
+                env = "UAT"
+            elif origin == "https://sealevel.sit.earthdata.nasa.gov":
+                env = "SIT"
+            elif origin == "https://sealevel.earthdata.nasa.gov":
+                env = "PROD"
+
+        if env not in ("DEV", "SIT", "UAT", "PROD", None):
+            raise Exception("Invalid Environment")
+        else:
+            return env
+
+    def get_start_time(self):
+        return self.get_int_arg(RequestParameters.START_TIME, 0)
+
+    def get_end_time(self):
+        return self.get_int_arg(RequestParameters.END_TIME, -1)
+
+    def get_start_year(self):
+        return self.get_int_arg(RequestParameters.START_YEAR, 0)
+
+    def get_end_year(self):
+        return self.get_int_arg(RequestParameters.END_YEAR, -1)
+
+    def get_clim_month(self):
+        return self.get_int_arg(RequestParameters.CLIM_MONTH, -1)
+
+    def get_start_datetime(self):
+        #self.__log("get start datetime as {}".format(RequestParameters.START_TIME))
+        time_str = self.get_argument(RequestParameters.START_TIME)
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
+        return dt
+
+    def get_end_datetime(self):
+        time_str = self.get_argument(RequestParameters.END_TIME)
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
+        return dt
+
+    def get_start_datetime_ms(self):
+        time_str = self.get_argument(RequestParameters.START_TIME)
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str) / 1000).replace(tzinfo=UTC)
+        return dt
+
+    def get_end_datetime_ms(self):
+        time_str = self.get_argument(RequestParameters.END_TIME)
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str) / 1000).replace(tzinfo=UTC)
+        return dt
+
+    def get_start_row(self):
+        return self.get_int_arg(RequestParameters.START_ROW, 0)
+
+    def get_row_count(self):
+        return self.get_int_arg(RequestParameters.ROW_COUNT, 10)
+
+    def get_content_type(self):
+        return self.get_argument(RequestParameters.OUTPUT, "JSON")
+
+    def get_apply_low_pass_filter(self, default=True):
+        return self.get_boolean_arg(RequestParameters.APPLY_LOW_PASS, default)
+
+    def get_low_pass_low_cut(self, default=12):
+        return self.get_float_arg(RequestParameters.LOW_CUT, default)
+
+    def get_low_pass_order(self, default=9):
+        return self.get_float_arg(RequestParameters.ORDER, default)
+
+    def get_include_meta(self):
+        return self.get_boolean_arg("includemeta", True)
+
+    def get_plot_series(self, default="mean"):
+        return self.get_argument(RequestParameters.PLOT_SERIES, default=default)
+
+    def get_plot_type(self, default="default"):
+        return self.get_argument(RequestParameters.PLOT_TYPE, default=default)
+
+    def get_nparts(self):
+        return self.get_int_arg(RequestParameters.NPARTS, 0)
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/NexusRequestObjectTornadoFree.py b/analysis/webservice/webmodel/NexusRequestObjectTornadoFree.py
new file mode 100644
index 0000000..d6d512e
--- /dev/null
+++ b/analysis/webservice/webmodel/NexusRequestObjectTornadoFree.py
@@ -0,0 +1,108 @@
+import logging
+import re
+from datetime import datetime
+from decimal import Decimal
+
+from pytz import UTC
+from webservice.webmodel.RequestParameters import RequestParameters
+from webservice.webmodel.StatsComputeOptions import StatsComputeOptions
+
+
+class NexusRequestObjectTornadoFree(StatsComputeOptions):
+    shortNamePattern = re.compile("^[a-zA-Z0-9_\-,\.]+$")
+    floatingPointPattern = re.compile('[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?')
+
+    def __init__(self, request_handler):
+        self.__log = logging.getLogger(__name__)
+        if request_handler is None:
+            raise Exception("Request handler cannot be null")
+        StatsComputeOptions.__init__(self)
+
+        self._dataset = self._parse_dataset(request_handler)
+
+        self._bounding_box = self._parse_bounding_box(request_handler)
+
+        self._start_time = self._parse_start_time(request_handler)
+        self._end_time = self._parse_end_time(request_handler)
+
+        self._nparts = self._parse_nparts(request_handler)
+
+        self._content_type = self._parse_content_type(request_handler)
+
+    def get_dataset(self):
+        return self._dataset
+
+    def get_bounding_box(self):
+        return self._bounding_box
+
+    def get_start_datetime(self):
+        return self._start_time
+
+    def get_end_datetime(self):
+        return self._end_time
+
+    def get_nparts(self):
+        return self._nparts
+
+    def get_content_type(self):
+        return self._content_type
+
+    def _parse_dataset(self, request_handler):
+        ds = request_handler.get_argument(RequestParameters.DATASET, None)
+        if ds is not None and not self.__validate_is_shortname(ds):
+            raise Exception("Invalid shortname")
+
+        return ds
+
+    def _parse_bounding_box(self, request_handler):
+
+        b = request_handler.get_argument("b", '')
+        if b:
+            min_lon, min_lat, max_lon, max_lat = [float(e) for e in b.split(",")]
+        else:
+            max_lat = request_handler.get_argument("maxLat", 90)
+            max_lat = Decimal(max_lat) if self.__validate_is_number(max_lat) else 90
+
+            min_lat = request_handler.get_argument("minLat", -90)
+            min_lat = Decimal(min_lat) if self.__validate_is_number(min_lat) else -90
+
+            max_lon = request_handler.get_argument("maxLon", 180)
+            max_lon = Decimal(max_lon) if self.__validate_is_number(max_lon) else 180
+
+            min_lon = request_handler.get_argument("minLon", -180)
+            min_lon = Decimal(min_lon) if self.__validate_is_number(min_lon) else -180
+
+        return min_lon, min_lat, max_lon, max_lat
+
+    def _parse_start_time(self, request_handler):
+        return self._parse_time(request_handler, RequestParameters.START_TIME, default=0)
+
+    def _parse_end_time(self, request_handler):
+        return self._parse_time(request_handler, RequestParameters.END_TIME, default=-1)
+
+    def _parse_time(self, request_handler, arg_name, default=None):
+        time_str = request_handler.get_argument(arg_name, default)
+        try:
+            dt = datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=UTC)
+        except ValueError:
+            dt = datetime.utcfromtimestamp(int(time_str)).replace(tzinfo=UTC)
+        return dt
+
+    def _parse_nparts(self, request_handler):
+        return int(request_handler.get_argument(RequestParameters.NPARTS, 0))
+
+    def _parse_content_type(self, request_handler):
+        return request_handler.get_argument(RequestParameters.OUTPUT, "JSON")
+
+    def __validate_is_shortname(self, v):
+        if v is None or len(v) == 0:
+            return False
+        return self.shortNamePattern.match(v) is not None
+
+    def __validate_is_number(self, v):
+        if v is None or (type(v) == str and len(v) == 0):
+            return False
+        elif type(v) == int or type(v) == float:
+            return True
+        else:
+            return self.floatingPointPattern.match(v) is not None
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/NexusResults.py b/analysis/webservice/webmodel/NexusResults.py
new file mode 100644
index 0000000..496234b
--- /dev/null
+++ b/analysis/webservice/webmodel/NexusResults.py
@@ -0,0 +1,104 @@
+import json
+from datetime import datetime
+
+from pytz import timezone
+from webservice.webmodel.CustomEncoder import CustomEncoder
+
+ISO_8601 = '%Y-%m-%dT%H:%M:%S%z'
+
+class NexusResults:
+    def __init__(self, results=None, meta=None, stats=None, computeOptions=None, status_code=200, **args):
+        self.status_code = status_code
+        self.__results = results
+        self.__meta = meta if meta is not None else {}
+        self.__stats = stats if stats is not None else {}
+        self.__computeOptions = computeOptions
+        if computeOptions is not None:
+            self.__minLat = computeOptions.get_min_lat()
+            self.__maxLat = computeOptions.get_max_lat()
+            self.__minLon = computeOptions.get_min_lon()
+            self.__maxLon = computeOptions.get_max_lon()
+            self.__ds = computeOptions.get_dataset()
+            self.__startTime = computeOptions.get_start_time()
+            self.__endTime = computeOptions.get_end_time()
+        else:
+            self.__minLat = args["minLat"] if "minLat" in args else -90.0
+            self.__maxLat = args["maxLat"] if "maxLat" in args else 90.0
+            self.__minLon = args["minLon"] if "minLon" in args else -180.0
+            self.__maxLon = args["maxLon"] if "maxLon" in args else 180.0
+            self.__ds = args["ds"] if "ds" in args else None
+            self.__startTime = args["startTime"] if "startTime" in args else None
+            self.__endTime = args["endTime"] if "endTime" in args else None
+
+        self.extendMeta(minLat=self.__minLat,
+                        maxLat=self.__maxLat,
+                        minLon=self.__minLon,
+                        maxLon=self.__maxLon,
+                        ds=self.__ds,
+                        startTime=self.__startTime,
+                        endTime=self.__endTime)
+
+    def computeOptions(self):
+        return self.__computeOptions
+
+    def results(self):
+        return self.__results
+
+    def meta(self):
+        return self.__meta
+
+    def stats(self):
+        return self.__stats
+
+    def _extendMeta(self, meta, minLat, maxLat, minLon, maxLon, ds, startTime, endTime):
+        if meta is None:
+            return None
+
+        meta["shortName"] = ds
+        if "title" in meta and "units" in meta:
+            meta["label"] = "%s (%s)" % (meta["title"], meta["units"])
+        if all(p is not None for p in [minLat, maxLat, minLon, maxLon]):
+            meta["bounds"] = {
+                "east": maxLon,
+                "west": minLon,
+                "north": maxLat,
+                "south": minLat
+            }
+        if startTime is not None and endTime is not None:
+            meta["time"] = {
+                "start": startTime,
+                "stop": endTime,
+                "iso_start": datetime.utcfromtimestamp(int(startTime)).replace(tzinfo=timezone('UTC')).strftime(ISO_8601),
+                "iso_stop": datetime.utcfromtimestamp(int(endTime)).replace(tzinfo=timezone('UTC')).strftime(ISO_8601)
+            }
+        return meta
+
+    def extendMeta(self, minLat, maxLat, minLon, maxLon, ds, startTime, endTime):
+        if self.__meta is None:
+            return None
+        if type(ds) == list:
+            for i in range(0, len(ds)):
+                shortName = ds[i]
+
+                if type(self.__meta) == list:
+                    subMeta = self.__meta[i]
+                else:
+                    subMeta = self.__meta  # Risky
+                self._extendMeta(subMeta, minLat, maxLat, minLon, maxLon, shortName, startTime, endTime)
+        else:
+            if type(self.__meta) == list:
+                self.__meta = self.__meta[0]
+            else:
+                self.__meta = self.__meta  # Risky
+            self._extendMeta(self.__meta, minLat, maxLat, minLon, maxLon, ds, startTime, endTime)
+
+    def toJson(self):
+        data = {
+            'meta': self.__meta,
+            'data': self.__results,
+            'stats': self.__stats
+        }
+        return json.dumps(data, indent=4, cls=CustomEncoder)
+
+    def toImage(self):
+        raise Exception("Not implemented for this result type")
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/RequestParameters.py b/analysis/webservice/webmodel/RequestParameters.py
new file mode 100644
index 0000000..b043cbe
--- /dev/null
+++ b/analysis/webservice/webmodel/RequestParameters.py
@@ -0,0 +1,23 @@
+class RequestParameters(object):
+    SEASONAL_CYCLE_FILTER = "seasonalFilter"
+    MAX_LAT = "maxLat"
+    MIN_LAT = "minLat"
+    MAX_LON = "maxLon"
+    MIN_LON = "minLon"
+    DATASET = "ds"
+    ENVIRONMENT = "env"
+    OUTPUT = "output"
+    START_TIME = "startTime"
+    END_TIME = "endTime"
+    START_YEAR = "startYear"
+    END_YEAR = "endYear"
+    CLIM_MONTH = "month"
+    START_ROW = "start"
+    ROW_COUNT = "numRows"
+    APPLY_LOW_PASS = "lowPassFilter"
+    LOW_CUT = "lowCut"
+    ORDER = "lpOrder"
+    PLOT_SERIES = "plotSeries"
+    PLOT_TYPE = "plotType"
+    NPARTS = "nparts"
+    METADATA_FILTER = "metadataFilter"
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/StandardNexusErrors.py b/analysis/webservice/webmodel/StandardNexusErrors.py
new file mode 100644
index 0000000..08ec616
--- /dev/null
+++ b/analysis/webservice/webmodel/StandardNexusErrors.py
@@ -0,0 +1,4 @@
+class StandardNexusErrors:
+    UNKNOWN = 1000
+    NO_DATA = 1001
+    DATASET_MISSING = 1002
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/StatsComputeOptions.py b/analysis/webservice/webmodel/StatsComputeOptions.py
new file mode 100644
index 0000000..4024fc1
--- /dev/null
+++ b/analysis/webservice/webmodel/StatsComputeOptions.py
@@ -0,0 +1,66 @@
+class StatsComputeOptions(object):
+    def __init__(self):
+        pass
+
+    def get_apply_seasonal_cycle_filter(self, default="false"):
+        raise Exception("Please implement")
+
+    def get_max_lat(self, default=90.0):
+        raise Exception("Please implement")
+
+    def get_min_lat(self, default=-90.0):
+        raise Exception("Please implement")
+
+    def get_max_lon(self, default=180):
+        raise Exception("Please implement")
+
+    def get_min_lon(self, default=-180):
+        raise Exception("Please implement")
+
+    def get_dataset(self):
+        raise Exception("Please implement")
+
+    def get_environment(self):
+        raise Exception("Please implement")
+
+    def get_start_time(self):
+        raise Exception("Please implement")
+
+    def get_end_time(self):
+        raise Exception("Please implement")
+
+    def get_start_year(self):
+        raise Exception("Please implement")
+
+    def get_end_year(self):
+        raise Exception("Please implement")
+
+    def get_clim_month(self):
+        raise Exception("Please implement")
+
+    def get_start_row(self):
+        raise Exception("Please implement")
+
+    def get_end_row(self):
+        raise Exception("Please implement")
+
+    def get_content_type(self):
+        raise Exception("Please implement")
+
+    def get_apply_low_pass_filter(self, default=False):
+        raise Exception("Please implement")
+
+    def get_low_pass_low_cut(self, default=12):
+        raise Exception("Please implement")
+
+    def get_low_pass_order(self, default=9):
+        raise Exception("Please implement")
+
+    def get_plot_series(self, default="mean"):
+        raise Exception("Please implement")
+
+    def get_plot_type(self, default="default"):
+        raise Exception("Please implement")
+
+    def get_nparts(self):
+        raise Exception("Please implement")
\ No newline at end of file
diff --git a/analysis/webservice/webmodel/__init__.py b/analysis/webservice/webmodel/__init__.py
new file mode 100644
index 0000000..d792988
--- /dev/null
+++ b/analysis/webservice/webmodel/__init__.py
@@ -0,0 +1,10 @@
+from .NexusRequestObjectTornadoFree import NexusRequestObjectTornadoFree
+from .NexusRequestObject import NexusRequestObject
+from .RequestParameters import RequestParameters
+from .StatsComputeOptions import StatsComputeOptions
+from .cached import cached
+from .CustomEncoder import CustomEncoder
+from .Exceptions import NexusProcessingException, NoDataException, DatasetNotFoundException
+from .StandardNexusErrors import StandardNexusErrors
+from .CustomEncoder import CustomEncoder
+from .NexusResults import NexusResults
diff --git a/analysis/webservice/webmodel/cached.py b/analysis/webservice/webmodel/cached.py
new file mode 100644
index 0000000..5c4d51a
--- /dev/null
+++ b/analysis/webservice/webmodel/cached.py
@@ -0,0 +1,38 @@
+import hashlib
+import inspect
+import time
+
+__CACHE = {}  # module-level result cache shared by every @cached handler; keyed by a function-signature hash
+
+def cached(ttl=60000):  # Decorator factory: cache a handler method's result for `ttl` milliseconds.
+    def _hash_function_signature(func):
+        hash_object = hashlib.md5(str(inspect.getargspec(func)) + str(func))  # NOTE(review): Python-2 only (project targets 2.7); md5 needs bytes on Python 3
+        return hash_object.hexdigest()
+
+    def _now():
+        return int(round(time.time() * 1000))  # current time in milliseconds
+
+    def _expired(t):
+        if t is None or _now() - t > ttl:
+            return True
+        else:
+            return False
+
+    def _cached_decorator(func):
+
+        def func_wrapper(self, computeOptions, **args):
+            hash = _hash_function_signature(func)  # NOTE(review): key ignores computeOptions — all requests to one handler share a single cached result; confirm intended
+            force = computeOptions.get_boolean_arg("nocached", default=False)  # clients bypass the cache with ?nocached=true
+
+            if force or hash not in __CACHE or (hash in __CACHE and _expired(__CACHE[hash]["time"])):
+                result = func(self, computeOptions, **args)
+                __CACHE[hash] = {
+                    "time": _now(),
+                    "result": result
+                }
+
+            return __CACHE[hash]["result"]
+
+        return func_wrapper
+
+    return _cached_decorator
\ No newline at end of file
diff --git a/data-access/nexustiles/config/datastores.ini b/data-access/nexustiles/config/datastores.ini.default
similarity index 88%
rename from data-access/nexustiles/config/datastores.ini
rename to data-access/nexustiles/config/datastores.ini.default
index 4795a79..0fe8d9d 100644
--- a/data-access/nexustiles/config/datastores.ini
+++ b/data-access/nexustiles/config/datastores.ini.default
@@ -4,6 +4,7 @@ port=9042
 keyspace=nexustiles
 local_datacenter=datacenter1
 protocol_version=3
+dc_policy=DCAwareRoundRobinPolicy
 
 [s3]
 bucket=nexus-jpl
diff --git a/data-access/nexustiles/dao/CassandraProxy.py b/data-access/nexustiles/dao/CassandraProxy.py
index 49f272d..ed37c5c 100644
--- a/data-access/nexustiles/dao/CassandraProxy.py
+++ b/data-access/nexustiles/dao/CassandraProxy.py
@@ -21,7 +21,7 @@ import nexusproto.DataTile_pb2 as nexusproto
 import numpy as np
 from cassandra.cqlengine import columns, connection, CQLEngineException
 from cassandra.cqlengine.models import Model
-from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy
+from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy, WhiteListRoundRobinPolicy
 from nexusproto.serialization import from_shaped_array
 
 INIT_LOCK = Lock()
@@ -154,6 +154,8 @@ class CassandraProxy(object):
         self.__cass_keyspace = config.get("cassandra", "keyspace")
         self.__cass_local_DC = config.get("cassandra", "local_datacenter")
         self.__cass_protocol_version = config.getint("cassandra", "protocol_version")
+        self.__cass_dc_policy = config.get("cassandra", "dc_policy")
+
         try:
             self.__cass_port = config.getint("cassandra", "port")
         except NoOptionError:
@@ -167,7 +169,11 @@ class CassandraProxy(object):
 
     def __open(self):
 
-        dc_policy = DCAwareRoundRobinPolicy(self.__cass_local_DC)
+        if self.__cass_dc_policy == 'DCAwareRoundRobinPolicy':
+            dc_policy = DCAwareRoundRobinPolicy(self.__cass_local_DC)
+        elif self.__cass_dc_policy == 'WhiteListRoundRobinPolicy':
+            dc_policy = WhiteListRoundRobinPolicy([self.__cass_url])
+
         token_policy = TokenAwarePolicy(dc_policy)
         connection.setup([host for host in self.__cass_url.split(',')], self.__cass_keyspace,
                          protocol_version=self.__cass_protocol_version, load_balancing_policy=token_policy,
diff --git a/data-access/nexustiles/nexustiles.py b/data-access/nexustiles/nexustiles.py
index c487059..cb8d2e9 100644
--- a/data-access/nexustiles/nexustiles.py
+++ b/data-access/nexustiles/nexustiles.py
@@ -81,12 +81,11 @@ class NexusTileService(object):
         self._metadatastore = None
 
         self._config = ConfigParser.RawConfigParser()
-        self._config.readfp(pkg_resources.resource_stream(__name__, "config/datastores.ini"),
-                            filename='datastores.ini')
+        self._config.read(NexusTileService._get_config_files('config/datastores.ini'))
+
         if config:
             self.override_config(config)
 
-
         if not skipDatastore:
             datastore = self._config.get("datastore", "store")
             if datastore == "cassandra":
@@ -517,3 +516,18 @@ class NexusTileService(object):
             return True
         else:
             return False
+
+    @staticmethod
+    def _get_config_files(filename):  # Resolve the packaged default config plus an optional site override, in that order.
+        log = logging.getLogger(__name__)
+        candidates = []
+        extensions = ['.default', '']  # '.default' listed first so a plain 'datastores.ini' read afterwards overrides it
+        for extension in extensions:
+            try:
+                candidate = pkg_resources.resource_filename(__name__, filename + extension)
+                log.info('use config file {}'.format(filename + extension))
+                candidates.append(candidate)
+            except KeyError as ke:  # NOTE(review): resource_filename may not raise for a merely-missing file; ConfigParser.read() skips unreadable paths anyway — confirm
+                log.warning('configuration file {} not found'.format(filename + extension))
+
+        return candidates
diff --git a/data-access/requirements.txt b/data-access/requirements.txt
index e0486ad..5e0f9bc 100644
--- a/data-access/requirements.txt
+++ b/data-access/requirements.txt
@@ -1,5 +1,5 @@
-      cassandra-driver==3.5.0
-      pysolr==3.7.0
-      requests
-      nexusproto
-      Shapely
\ No newline at end of file
+cassandra-driver==3.5.0
+pysolr==3.7.0
+requests
+nexusproto
+Shapely
\ No newline at end of file
diff --git a/data-access/setup.py b/data-access/setup.py
index 87754ff..c03b3f3 100644
--- a/data-access/setup.py
+++ b/data-access/setup.py
@@ -32,7 +32,7 @@ setup(
     long_description=open('README.md').read(),
 
     packages=['nexustiles', 'nexustiles.model', 'nexustiles.dao'],
-    package_data={'nexustiles': ['config/datastores.ini']},
+    package_data={'nexustiles': ['config/datastores.ini.default', 'config/datastores.ini']},
     platforms='any',
     python_requires='~=2.7',
     install_requires=pip_requirements,
diff --git a/docker/nexus-webapp/Dockerfile b/docker/nexus-webapp/Dockerfile
index 632357f..e4f3b20 100644
--- a/docker/nexus-webapp/Dockerfile
+++ b/docker/nexus-webapp/Dockerfile
@@ -14,6 +14,9 @@
 # limitations under the License.
 
 FROM nexusjpl/alpine-pyspark:2.4.4
+#FROM python:2.7
+#FROM alpine:3.12
+
 
 MAINTAINER Apache SDAP "dev@sdap.apache.org"
 
@@ -42,7 +45,8 @@ RUN apk add --update --no-cache \
     python3 \
     bash==4.4.19-r1 \
     libc-dev \
-    libressl2.7-libcrypto 
+    libressl2.7-libcrypto
+
 RUN  apk upgrade musl
 
 WORKDIR /tmp
@@ -87,4 +91,4 @@ RUN python setup.py install
 
 # Upgrade kubernetes client jar from the default version
 RUN rm /opt/spark/jars/kubernetes-client-4.1.2.jar
-ADD https://repo1.maven.org/maven2/io/fabric8/kubernetes-client/4.4.2/kubernetes-client-4.4.2.jar /opt/spark/jars
\ No newline at end of file
+ADD https://repo1.maven.org/maven2/io/fabric8/kubernetes-client/4.4.2/kubernetes-client-4.4.2.jar /opt/spark/jars