You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sdap.apache.org by jj...@apache.org on 2020/05/07 21:01:15 UTC
[incubator-sdap-nexus] branch master updated: SDAP-223: add esri
integration (#94)
This is an automated email from the ASF dual-hosted git repository.
jjacob pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-sdap-nexus.git
The following commit(s) were added to refs/heads/master by this push:
new a253969 SDAP-223: add esri integration (#94)
a253969 is described below
commit a253969dbeba6101c6642f6cd093f59e65faec8f
Author: yepremyana <ye...@gmail.com>
AuthorDate: Thu May 7 14:01:04 2020 -0700
SDAP-223: add esri integration (#94)
* Fix SDAP-223 add esri integration
* remove spark string from time_average_map and time_series
* updated embedded toolbox with host url and input polygon
* updated toolbox with host url and input polygon
* updated readme for input polygon usage and importing toolboxes
---
.gitignore | 3 +-
client/.idea/client.iml | 11 ---
client/.idea/encodings.xml | 6 --
client/.idea/misc.xml | 4 -
client/.idea/modules.xml | 8 --
client/.idea/vcs.xml | 6 --
integrations/esri/README.md | 41 ++++++++
integrations/esri/nexus_toolbox_embedded.tbx | Bin 0 -> 185856 bytes
.../zipped_toolbox/daily_difference_average.py | 69 ++++++++++++++
.../esri/zipped_toolbox/lat_hof_moeller.py | 105 +++++++++++++++++++++
.../esri/zipped_toolbox/lon_hof_moeller.py | 105 +++++++++++++++++++++
integrations/esri/zipped_toolbox/matchup.py | 80 ++++++++++++++++
integrations/esri/zipped_toolbox/nexus_toolbox.tbx | Bin 0 -> 4096 bytes
.../esri/zipped_toolbox/time_average_map.py | 66 +++++++++++++
.../esri/zipped_toolbox/time_series_script.py | 70 ++++++++++++++
{client => integrations/python-client}/.gitignore | 0
{client => integrations/python-client}/README.md | 0
.../python-client}/docs/nexuscli/index.html | 0
.../python-client}/docs/nexuscli/nexuscli.m.html | 0
.../docs/nexuscli/nexuscli_ow.m.html | 0
.../python-client}/docs/nexuscli/test/index.html | 0
.../docs/nexuscli/test/nexuscli_test.m.html | 0
.../python-client}/nexuscli/__init__.py | 0
.../python-client}/nexuscli/nexuscli.py | 0
.../python-client}/nexuscli/nexuscli_ow.py | 0
.../python-client}/nexuscli/test/__init__.py | 0
.../python-client}/nexuscli/test/nexuscli_test.py | 0
.../python-client}/requirements.txt | 0
{client => integrations/python-client}/setup.py | 0
29 files changed, 538 insertions(+), 36 deletions(-)
diff --git a/.gitignore b/.gitignore
index bb1c70f..ddfe3ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
*.vscode
*.code-workspace
-*.idea
\ No newline at end of file
+*.idea
+*.DS_Store
\ No newline at end of file
diff --git a/client/.idea/client.iml b/client/.idea/client.iml
deleted file mode 100644
index c366eea..0000000
--- a/client/.idea/client.iml
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="PYTHON_MODULE" version="4">
- <component name="NewModuleRootManager">
- <content url="file://$MODULE_DIR$" />
- <orderEntry type="jdk" jdkName="Python 3.6 (nexuscli)" jdkType="Python SDK" />
- <orderEntry type="sourceFolder" forTests="false" />
- </component>
- <component name="TestRunnerService">
- <option name="PROJECT_TEST_RUNNER" value="Unittests" />
- </component>
-</module>
\ No newline at end of file
diff --git a/client/.idea/encodings.xml b/client/.idea/encodings.xml
deleted file mode 100644
index 97626ba..0000000
--- a/client/.idea/encodings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="Encoding">
- <file url="PROJECT" charset="UTF-8" />
- </component>
-</project>
\ No newline at end of file
diff --git a/client/.idea/misc.xml b/client/.idea/misc.xml
deleted file mode 100644
index ce3d6ed..0000000
--- a/client/.idea/misc.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.6 (nexuscli)" project-jdk-type="Python SDK" />
-</project>
\ No newline at end of file
diff --git a/client/.idea/modules.xml b/client/.idea/modules.xml
deleted file mode 100644
index 0742ebc..0000000
--- a/client/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="ProjectModuleManager">
- <modules>
- <module fileurl="file://$PROJECT_DIR$/.idea/client.iml" filepath="$PROJECT_DIR$/.idea/client.iml" />
- </modules>
- </component>
-</project>
\ No newline at end of file
diff --git a/client/.idea/vcs.xml b/client/.idea/vcs.xml
deleted file mode 100644
index 6c0b863..0000000
--- a/client/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
- <component name="VcsDirectoryMappings">
- <mapping directory="$PROJECT_DIR$/.." vcs="Git" />
- </component>
-</project>
\ No newline at end of file
diff --git a/integrations/esri/README.md b/integrations/esri/README.md
new file mode 100644
index 0000000..4698157
--- /dev/null
+++ b/integrations/esri/README.md
@@ -0,0 +1,41 @@
+# SDAP ArcGIS Tools
+
+Toolbox and scripts for utilizing SDAP analytics within ArcGIS
+
+## Contents
+
+`zipped_toolbox`: Contains the python scripts and the toolbox.
+
+`nexus_toolbox_embedded.tbx`: toolbox which contains the SDAP scripts embedded into the toolbox.
+
+## Usage
+
+### Development
+For development please use the `zipped_toolbox`. You may make your changes directly to the python scripts
+
+Once you have downloaded the toolbox with the script you can upload the toolbox into ArcGIS pro.
+Please follow the instructions at https://pro.arcgis.com/en/pro-app/help/projects/connect-to-a-toolbox.htm to connect a toolbox.
+
+When your toolbox is inside of your ArcGIS project, you can then proceed to utilize the tool.
+
+To make changes to the GUI located in the catalog window on the right of your ArcGIS project, you will right click on
+the script within the toolbox, ie `DailyDifferenceAverage`, and you will click on `properties`. A window will pop up.
+On the left column of the window you will see `Validation`. Click on `Validation`. There you can manipulate the code to
+customize the GUI and the parameters. You may also set parameters in the `parameters` pane within the `properties` window.
+
+#### Known Issues
+If you encounter any issues using the tool this can be because it is not pointing to the correct location of the script.
+To fix this, you will right click on the script within the toolbox, ie `DailyDifferenceAverage`, and you will click on
+`properties`. A window will pop up and you will see a path pointing to the `daily_difference_average.py`. Ensure that this
+is the correct path for the script.
+
+When creating an interactive polygon in the toolbox, click on the pencil, create your shape within the ArcGIS pro map,
+and double click on the red dot when finished drawing the shape. You may also right click on the final red dot and click
+Finish.
+
+### User
+For usage purposes please use `nexus_toolbox_embedded.tbx`. This toolbox has the python scripts embedded within which
+means that you cannot change the underlying script codes for the tools. To use, you can simply make a connection to this
+toolbox (after downloading) in the Catalog by right clicking Toolboxes > Add Toolbox > and identifying `nexus_toolbox_embedded.tbx`.
+If you are a developer, please refer to the section above.
+
diff --git a/integrations/esri/nexus_toolbox_embedded.tbx b/integrations/esri/nexus_toolbox_embedded.tbx
new file mode 100644
index 0000000..147194d
Binary files /dev/null and b/integrations/esri/nexus_toolbox_embedded.tbx differ
diff --git a/integrations/esri/zipped_toolbox/daily_difference_average.py b/integrations/esri/zipped_toolbox/daily_difference_average.py
new file mode 100755
index 0000000..3db71f6
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/daily_difference_average.py
@@ -0,0 +1,69 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Daily Difference Graph
+ Description: Creates daily difference graph from
+ a given dataset
+ Created By: Alice Yepremyan
+ Date: 1/28/2020
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import matplotlib.pyplot as plt
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+ds = arcpy.GetParameterAsText(1)
+ds2 = arcpy.GetParameterAsText(2)
+input_feature = arcpy.GetParameter(3)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(5)).strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+url = 'https://{}/dailydifferenceaverage_spark?dataset={}&climatology={}&b={},{},{},{}&startTime={}&endTime={}'.format(host_url,ds, ds2, min_lat, min_lon, max_lat, max_lon, start_time, end_time)
+#url = 'https://{}/dailydifferenceaverage_spark?dataset=SMAP_SSS_L3_MONTHLY_500&climatology=SMAP_SSS_L3_MONTHLY_500_CLIM&b=-150,45,-120,60&startTime=2015-04-16T00:00:00Z&endTime=2018-04-14T00:00:00Z'
+arcpy.AddMessage('{}'.format(url))
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+# Send request to server
+response = str(requests.get(url, verify=False).text)
+dda_amce = json.loads(response)
+arcpy.AddMessage('{}'.format(response))
+
+means = []
+dates = []
+st_ds = []
+
+# Sampling every 30th data point to reduce plot noise
+for i, data in enumerate(dda_amce['data']):
+ means.append(data[0]['mean'])
+ dates.append(datetime.datetime.fromtimestamp((data[0]['time'])))
+ st_ds.append(data[0]['std'])
+
+# Plot the extracted means
+plt.figure(figsize=(10, 5), dpi=100)
+lines = plt.errorbar(dates, means, st_ds)
+plt.xlim(dates[0], dates[-1])
+plt.xlabel('Time', fontsize=16)
+plt.ylim(min(means)-1, max(means)+1)
+plt.ylabel('Temperature Difference (K)', fontsize=16)
+plt.title('Temperature Difference from Average (K) with Standard Deviation Error Bars', fontsize=18)
+plt.show()
diff --git a/integrations/esri/zipped_toolbox/lat_hof_moeller.py b/integrations/esri/zipped_toolbox/lat_hof_moeller.py
new file mode 100755
index 0000000..9f06206
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/lat_hof_moeller.py
@@ -0,0 +1,105 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Latitude/Time HofMoeller Spark
+ Description: Computes a latitude/time HofMoeller plot given an arbitrary
+ geographical area and time range
+ Created By: Alice Yepremyan
+ Date: 2/18/2020
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import matplotlib.pyplot as plt
+from matplotlib.ticker import FuncFormatter
+from matplotlib import cm
+import numpy as np
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+def createHoffmueller(data, coordSeries, timeSeries, coordName, title, interpolate='nearest'):
+
+ cmap = cm.coolwarm
+ # ls = LightSource(315, 45)
+ # rgb = ls.shade(data, cmap)
+
+ fig, ax = plt.subplots()
+ fig.set_size_inches(11.0, 8.5)
+ cax = ax.imshow(data, interpolation=interpolate, cmap=cmap)
+
+ def yFormatter(y, pos):
+ if y < len(coordSeries):
+ return "%s $^\circ$" % (int(coordSeries[int(y)] * 100.0) / 100.)
+ else:
+ return ""
+
+ def xFormatter(x, pos):
+ if x < len(timeSeries):
+ return timeSeries[int(x)].strftime('%b %Y')
+ else:
+ return ""
+
+ ax.xaxis.set_major_formatter(FuncFormatter(xFormatter))
+ ax.yaxis.set_major_formatter(FuncFormatter(yFormatter))
+
+ ax.set_title(title)
+ ax.set_ylabel(coordName)
+ ax.set_xlabel('Date')
+
+ fig.colorbar(cax)
+ fig.autofmt_xdate()
+
+ plt.show()
+
+def createLatitudeHoffmueller(res, meta):
+ latSeries = [m['latitude'] for m in res[0]['lats']]
+ timeSeries = [datetime.datetime.fromtimestamp(m['time']) for m in res]
+
+ data = np.zeros((len(latSeries), len(timeSeries)))
+
+ for t in range(0, len(timeSeries)):
+ timeSet = res[t]
+ for l in range(0, len(latSeries)):
+ latSet = timeSet['lats'][l]
+ value = latSet['mean']
+ data[len(latSeries) - l - 1][t] = value
+
+ title = meta['shortName']
+ dateRange = "%s - %s" % (timeSeries[0].strftime('%b %Y'), timeSeries[-1].strftime('%b %Y'))
+
+ return createHoffmueller(data, latSeries, timeSeries, "Latitude", title="%s\n%s" % (title, dateRange),
+ interpolate='nearest')
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+dataset = arcpy.GetParameterAsText(1)
+input_feature = arcpy.GetParameter(2)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(3)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+url = f"https://{host_url}/latitudeTimeHofMoellerSpark?ds={dataset}&b={max_lat},{min_lon},{min_lat},{max_lon}&startTime={start_time}&endTime={end_time}"
+#url = "https://{}/latitudeTimeHofMoellerSpark?ds=TELLUS_GRACE_MASCON_CRI_GRID_RL06_V1_OCEAN&b=-30,15,-45,30&startTime=2010-02-01T00:00:00Z&endTime=2014-01-01T00:00:00Z"
+arcpy.AddMessage('{}'.format(url))
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+ts = json.loads(str(requests.get(url).text))
+
+# Plot Hoffmueller
+createLatitudeHoffmueller(ts["data"], ts["meta"])
+
diff --git a/integrations/esri/zipped_toolbox/lon_hof_moeller.py b/integrations/esri/zipped_toolbox/lon_hof_moeller.py
new file mode 100755
index 0000000..374a11a
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/lon_hof_moeller.py
@@ -0,0 +1,105 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Longitude/Time HofMoeller Spark
+ Description: Computes a longitude/time HofMoeller plot given an arbitrary
+ geographical area and time range
+ Created By: Alice Yepremyan
+ Date: 2/18/2020
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import matplotlib.pyplot as plt
+from matplotlib.ticker import FuncFormatter
+from matplotlib import cm
+import numpy as np
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+def createHoffmueller(data, coordSeries, timeSeries, coordName, title, interpolate='nearest'):
+
+ cmap = cm.coolwarm
+ # ls = LightSource(315, 45)
+ # rgb = ls.shade(data, cmap)
+
+ fig, ax = plt.subplots()
+ fig.set_size_inches(11.0, 8.5)
+ cax = ax.imshow(data, interpolation=interpolate, cmap=cmap)
+
+ def yFormatter(y, pos):
+ if y < len(coordSeries):
+ return "%s $^\circ$" % (int(coordSeries[int(y)] * 100.0) / 100.)
+ else:
+ return ""
+
+ def xFormatter(x, pos):
+ if x < len(timeSeries):
+ return timeSeries[int(x)].strftime('%b %Y')
+ else:
+ return ""
+
+ ax.xaxis.set_major_formatter(FuncFormatter(xFormatter))
+ ax.yaxis.set_major_formatter(FuncFormatter(yFormatter))
+
+ ax.set_title(title)
+ ax.set_ylabel(coordName)
+ ax.set_xlabel('Date')
+
+ fig.colorbar(cax)
+ fig.autofmt_xdate()
+
+ plt.show()
+
+def createLongitudeHoffmueller(res, meta):
+ lonSeries = [m['longitude'] for m in res[0]['lons']]
+ timeSeries = [datetime.datetime.fromtimestamp(m['time']) for m in res]
+
+ data = np.zeros((len(lonSeries), len(timeSeries)))
+
+ for t in range(0, len(timeSeries)):
+ timeSet = res[t]
+ for l in range(0, len(lonSeries)):
+ latSet = timeSet['lons'][l]
+ value = latSet['mean']
+ data[len(lonSeries) - l - 1][t] = value
+
+ title = meta['shortName']
+ dateRange = "%s - %s" % (timeSeries[0].strftime('%b %Y'), timeSeries[-1].strftime('%b %Y'))
+
+ return createHoffmueller(data, lonSeries, timeSeries, "Longitude", "%s\n%s" % (title, dateRange),
+ interpolate='nearest')
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+dataset = arcpy.GetParameterAsText(1)
+input_feature = arcpy.GetParameter(2)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(3)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+url = f"https://{host_url}/longitudeTimeHofMoellerSpark?ds={dataset}&b={max_lat},{min_lon},{min_lat},{max_lon}&startTime={start_time}&endTime={end_time}"
+#url = "https://{}/longitudeTimeHofMoellerSpark?ds=TELLUS_GRACE_MASCON_CRI_GRID_RL06_V1_OCEAN&b=-30,15,-45,30&startTime=2010-02-01T00:00:00Z&endTime=2014-01-01T00:00:00Z"
+arcpy.AddMessage('{}'.format(url))
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+ts = json.loads(str(requests.get(url).text))
+
+# Plot Hoffmueller
+createLongitudeHoffmueller(ts["data"], ts["meta"])
+
diff --git a/integrations/esri/zipped_toolbox/matchup.py b/integrations/esri/zipped_toolbox/matchup.py
new file mode 100755
index 0000000..1a1a8de
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/matchup.py
@@ -0,0 +1,80 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Matchup
+ Description: Match measurements between two or more datasets.
+ Created By: Alice Yepremyan
+ Date: 12/12/2019
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import numpy as np
+import matplotlib.pyplot as plt
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+
+def show_plot(x_data, y_data, x_label, y_label):
+ """
+ Display a simple line plot.
+
+ :param x_data: Numpy array containing data for the X axis
+ :param y_data: Numpy array containing data for the Y axis
+ :param x_label: Label applied to X axis
+ :param y_label: Label applied to Y axis
+ """
+ np.random.seed(19680801)
+ plt.figure(figsize=(10, 5), dpi=100)
+ plt.scatter(x_data, y_data, alpha=0.5)
+ plt.grid(b=True, which='major', color='k', linestyle='-')
+ plt.xlabel(x_label)
+ plt.ylabel(y_label)
+ plt.show()
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+primary = arcpy.GetParameterAsText(1)
+secondary = arcpy.GetParameterAsText(2)
+input_feature = arcpy.GetParameter(3)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(5)).strftime('%Y-%m-%dT%H:%M:%SZ')
+parameter = arcpy.GetParameterAsText(6)
+depth_min = arcpy.GetParameterAsText(7)
+depth_max = arcpy.GetParameterAsText(8)
+tt = arcpy.GetParameterAsText(9)
+rt = arcpy.GetParameterAsText(10)
+platforms = arcpy.GetParameterAsText(11)
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+url = f"https://{host_url}/match_spark?primary={primary}&matchup={secondary}&startTime={start_time}&endTime={end_time}&tt={tt}&rt={rt}&b={max_lat},{min_lon},{min_lat},{max_lon}&platforms={platforms}¶meter={parameter}&matchOne=true&depthMin={depth_min}&depthMax={depth_max}"
+# url = "https://{}/match_spark?primary=AVHRR_OI_L4_GHRSST_NCEI&matchup=spurs&startTime=2013-10-01T00:00:00Z&endTime=2013-10-30T23:59:59Z&tt=86400&rt=10000.0&b=-30,15,-45,30&platforms=1,2,3,4,5,6,7,8,9¶meter=sst&matchOne=true&depthMin=0&depthMax=5"
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+ts = json.loads(str(requests.get(url).text))
+
+
+satellite = []
+in_situ = []
+for data in ts ['data']:
+ for matches in data ['matches']:
+ satellite.append(data['sea_water_temperature'])
+ in_situ.append(matches['sea_water_temperature'])
+
+# Plot matchup
+show_plot(in_situ, satellite, secondary+' (c)', primary+' (c)')
\ No newline at end of file
diff --git a/integrations/esri/zipped_toolbox/nexus_toolbox.tbx b/integrations/esri/zipped_toolbox/nexus_toolbox.tbx
new file mode 100755
index 0000000..34a3a9a
Binary files /dev/null and b/integrations/esri/zipped_toolbox/nexus_toolbox.tbx differ
diff --git a/integrations/esri/zipped_toolbox/time_average_map.py b/integrations/esri/zipped_toolbox/time_average_map.py
new file mode 100755
index 0000000..65d00c4
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/time_average_map.py
@@ -0,0 +1,66 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Time average map
+ Description: Creates time-average graph from
+ a given dataset
+ Created By: Alice Yepremyan
+ Date: 1/20/2020
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import matplotlib.pyplot as plt
+import matplotlib as mpl
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+ds = arcpy.GetParameterAsText(1)
+input_feature = arcpy.GetParameter(2)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(3)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+request = "https://{}/timeAvgMapSpark?ds={}&startTime={}&endTime={}&minLon={}&minLat={}&maxLon={}&maxLat={}" \
+ .format(host_url, ds, start_time, end_time, min_lon, min_lat, max_lon, max_lat)
+#request = 'https://{}/timeAvgMapSpark?ds=AVHRR_OI_L4_GHRSST_NCEI&minLat=-5&minLon=-170&maxLat=5&maxLon=-120&startTime=1356998400&endTime=1383091200'
+arcpy.AddMessage('{}'.format(request))
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+# Send request to server
+response = requests.get(request).json()
+arcpy.AddMessage('{}'.format(response))
+
+# Parse the response and create an image
+lons = [point['lon'] for point in response['data'][0]]
+lats = [a_list[0]['lat'] for a_list in response['data']]
+
+my_list = numpy.ndarray((len(lats), len(lons)))
+for x in range(0, len(lats)):
+ for y in range(0, len(lons)):
+ my_list[x][y] = response['data'][x][y]['mean']
+
+norm = mpl.colors.Normalize(vmin=my_list.min(), vmax=my_list.max())
+
+# Plot the time average
+fig, ax1 = plt.subplots(figsize=(10, 5), dpi=100)
+ax1.pcolormesh(lons, lats, my_list, vmin=my_list.min(), vmax=my_list.max(), cmap='gist_rainbow')
+plt.title('Time Average Map')
+plt.show()
diff --git a/integrations/esri/zipped_toolbox/time_series_script.py b/integrations/esri/zipped_toolbox/time_series_script.py
new file mode 100755
index 0000000..94d6fa7
--- /dev/null
+++ b/integrations/esri/zipped_toolbox/time_series_script.py
@@ -0,0 +1,70 @@
+
+"""-----------------------------------------------------------------------------
+ Script Name: NEXUS Time Series
+ Description: Creates time-series graph from
+ a given dataset
+ Created By: Alice Yepremyan
+ Date: 12/12/2019
+-----------------------------------------------------------------------------"""
+
+import arcpy
+import json
+import requests
+import datetime
+import matplotlib.pyplot as plt
+import pandas as pd
+
+arcpy.env.overwriteOutput = True
+
+# Get the input parameters
+host_url = arcpy.GetParameterAsText(0)
+ds = arcpy.GetParameterAsText(1)
+input_feature = arcpy.GetParameter(2)
+start_time = pd.to_datetime(arcpy.GetParameterAsText(3)).strftime('%Y-%m-%dT%H:%M:%SZ')
+end_time = pd.to_datetime(arcpy.GetParameterAsText(4)).strftime('%Y-%m-%dT%H:%M:%SZ')
+
+# get coordinates by calculating geometric attributes
+arcpy.MakeFeatureLayer_management(input_feature, "layer")
+arcpy.AddGeometryAttributes_management("layer", "EXTENT")
+
+rows = arcpy.SearchCursor("layer", fields="EXT_MIN_X;EXT_MIN_Y;EXT_MAX_X;EXT_MAX_Y")
+row = rows.next()
+min_lon = row.getValue("EXT_MIN_X")
+max_lon = row.getValue("EXT_MAX_X")
+min_lat = row.getValue("EXT_MIN_Y")
+max_lat = row.getValue("EXT_MAX_Y")
+
+# Build the HTTP request
+url = 'https://{}/timeSeriesSpark?ds={}&minLat={}&minLon={}&maxLat={}&maxLon={}&startTime={}&endTime={}'.format(host_url,ds, min_lat, min_lon, max_lat, max_lon, start_time, end_time)
+#url = 'https://{}/timeSeriesSpark?ds=AVHRR_OI_L4_GHRSST_NCEI&minLat=45&minLon=-150&maxLat=60&maxLon=-120&startTime=2008-09-01T00:00:00Z&endTime=2015-10-01T23:59:59Z'
+arcpy.AddMessage('{}'.format(url))
+
+# Report a success message
+arcpy.AddMessage("Url received, getting json")
+
+ts = json.loads(str(requests.get(url).text))
+arcpy.AddMessage('{}'.format(ts))
+means = []
+dates = []
+
+# ToDo: should include a try except if no ts['data']
+for data in ts['data']:
+ means.append(data[0]['mean'])
+ d = datetime.datetime.fromtimestamp((data[0]['time']))
+ dates.append(d)
+
+arcpy.AddMessage('This is the means: {}'.format(means))
+
+# Plot the extracted means
+plt.figure(figsize=(10, 5), dpi=100)
+lines = plt.plot(dates, means)
+plt.setp(lines, color='r', linewidth=1.0, linestyle='--',
+ dash_capstyle='round', marker='.', markersize=5.0, mfc='r')
+plt.grid(b=True, which='major', color='k', linestyle='--')
+plt.xlim(dates[0], dates[-1])
+plt.xlabel('Time')
+plt.ylim(min(means), max(means))
+plt.ylabel('Temperature (K)')
+plt.title('Time Series')
+plt.show()
+
diff --git a/client/.gitignore b/integrations/python-client/.gitignore
similarity index 100%
rename from client/.gitignore
rename to integrations/python-client/.gitignore
diff --git a/client/README.md b/integrations/python-client/README.md
similarity index 100%
rename from client/README.md
rename to integrations/python-client/README.md
diff --git a/client/docs/nexuscli/index.html b/integrations/python-client/docs/nexuscli/index.html
similarity index 100%
rename from client/docs/nexuscli/index.html
rename to integrations/python-client/docs/nexuscli/index.html
diff --git a/client/docs/nexuscli/nexuscli.m.html b/integrations/python-client/docs/nexuscli/nexuscli.m.html
similarity index 100%
rename from client/docs/nexuscli/nexuscli.m.html
rename to integrations/python-client/docs/nexuscli/nexuscli.m.html
diff --git a/client/docs/nexuscli/nexuscli_ow.m.html b/integrations/python-client/docs/nexuscli/nexuscli_ow.m.html
similarity index 100%
rename from client/docs/nexuscli/nexuscli_ow.m.html
rename to integrations/python-client/docs/nexuscli/nexuscli_ow.m.html
diff --git a/client/docs/nexuscli/test/index.html b/integrations/python-client/docs/nexuscli/test/index.html
similarity index 100%
rename from client/docs/nexuscli/test/index.html
rename to integrations/python-client/docs/nexuscli/test/index.html
diff --git a/client/docs/nexuscli/test/nexuscli_test.m.html b/integrations/python-client/docs/nexuscli/test/nexuscli_test.m.html
similarity index 100%
rename from client/docs/nexuscli/test/nexuscli_test.m.html
rename to integrations/python-client/docs/nexuscli/test/nexuscli_test.m.html
diff --git a/client/nexuscli/__init__.py b/integrations/python-client/nexuscli/__init__.py
similarity index 100%
rename from client/nexuscli/__init__.py
rename to integrations/python-client/nexuscli/__init__.py
diff --git a/client/nexuscli/nexuscli.py b/integrations/python-client/nexuscli/nexuscli.py
similarity index 100%
rename from client/nexuscli/nexuscli.py
rename to integrations/python-client/nexuscli/nexuscli.py
diff --git a/client/nexuscli/nexuscli_ow.py b/integrations/python-client/nexuscli/nexuscli_ow.py
similarity index 100%
rename from client/nexuscli/nexuscli_ow.py
rename to integrations/python-client/nexuscli/nexuscli_ow.py
diff --git a/client/nexuscli/test/__init__.py b/integrations/python-client/nexuscli/test/__init__.py
similarity index 100%
rename from client/nexuscli/test/__init__.py
rename to integrations/python-client/nexuscli/test/__init__.py
diff --git a/client/nexuscli/test/nexuscli_test.py b/integrations/python-client/nexuscli/test/nexuscli_test.py
similarity index 100%
rename from client/nexuscli/test/nexuscli_test.py
rename to integrations/python-client/nexuscli/test/nexuscli_test.py
diff --git a/client/requirements.txt b/integrations/python-client/requirements.txt
similarity index 100%
rename from client/requirements.txt
rename to integrations/python-client/requirements.txt
diff --git a/client/setup.py b/integrations/python-client/setup.py
similarity index 100%
rename from client/setup.py
rename to integrations/python-client/setup.py