You are viewing a plain text version of this content. The canonical link was provided as a hyperlink in the original HTML version of this message (link not preserved in this plain-text export).
Posted to commits@sdap.apache.org by le...@apache.org on 2017/10/27 22:40:28 UTC
[51/51] [partial] incubator-sdap-nexus git commit: SDAP-1 Import all code under the SDAP SGA
SDAP-1 Import all code under the SDAP SGA
Project: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/commit/ff98fa34
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/tree/ff98fa34
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/diff/ff98fa34
Branch: refs/heads/master
Commit: ff98fa346303431542b8391cc2a1bf7561d1bd03
Parents:
Author: Lewis John McGibbney <le...@gmail.com>
Authored: Fri Oct 27 15:39:29 2017 -0700
Committer: Lewis John McGibbney <le...@gmail.com>
Committed: Fri Oct 27 15:39:29 2017 -0700
----------------------------------------------------------------------
LICENSE | 201 +
README.md | 118 +
analysis/.DS_Store | Bin 0 -> 6148 bytes
analysis/.gitignore | 6 +
analysis/README.md | 25 +
analysis/package-list.txt | 52 +
analysis/requirements.txt | 51 +
analysis/setup.py | 46 +
analysis/static/index.html | 1 +
analysis/test.py | 13 +
analysis/tests/__init__.py | 4 +
.../algorithms/StandardDeviationSearch_test.py | 212 +
.../algorithms/longitudelatitudemap_test.py | 83 +
analysis/tests/algorithms_spark/Matchup_test.py | 311 ++
analysis/tests/algorithms_spark/__init__.py | 4 +
analysis/webservice/Filtering.py | 157 +
analysis/webservice/LayerConfig.py | 61 +
analysis/webservice/NexusHandler.py | 554 ++
analysis/webservice/__init__.py | 0
analysis/webservice/algorithms/Capabilities.py | 43 +
.../webservice/algorithms/CorrelationMap.py | 129 +
.../algorithms/DailyDifferenceAverage.py | 230 +
.../webservice/algorithms/DataInBoundsSearch.py | 205 +
.../webservice/algorithms/DataSeriesList.py | 30 +
analysis/webservice/algorithms/DelayTest.py | 29 +
.../webservice/algorithms/ErrorTosserTest.py | 23 +
analysis/webservice/algorithms/Heartbeat.py | 40 +
analysis/webservice/algorithms/HofMoeller.py | 362 ++
.../algorithms/LongitudeLatitudeMap.py | 249 +
.../algorithms/StandardDeviationSearch.py | 191 +
.../webservice/algorithms/TestInitializer.py | 16 +
analysis/webservice/algorithms/TileSearch.py | 71 +
analysis/webservice/algorithms/TimeAvgMap.py | 265 +
analysis/webservice/algorithms/TimeSeries.py | 549 ++
.../webservice/algorithms/TimeSeriesSolr.py | 342 ++
analysis/webservice/algorithms/__init__.py | 20 +
.../algorithms/doms/BaseDomsHandler.py | 709 +++
.../algorithms/doms/DatasetListQuery.py | 106 +
.../algorithms/doms/DomsInitialization.py | 133 +
.../webservice/algorithms/doms/MatchupQuery.py | 436 ++
.../webservice/algorithms/doms/MetadataQuery.py | 51 +
.../algorithms/doms/ResultsPlotQuery.py | 40 +
.../algorithms/doms/ResultsRetrieval.py | 34 +
.../algorithms/doms/ResultsStorage.py | 275 +
.../webservice/algorithms/doms/StatsQuery.py | 52 +
.../webservice/algorithms/doms/ValuesQuery.py | 56 +
analysis/webservice/algorithms/doms/__init__.py | 22 +
analysis/webservice/algorithms/doms/config.py | 83 +
.../webservice/algorithms/doms/datafetch.py | 29 +
.../webservice/algorithms/doms/domsconfig.ini | 13 +
.../webservice/algorithms/doms/fetchedgeimpl.py | 201 +
analysis/webservice/algorithms/doms/geo.py | 113 +
.../webservice/algorithms/doms/histogramplot.py | 117 +
.../webservice/algorithms/doms/insitusubset.py | 248 +
analysis/webservice/algorithms/doms/mapplot.py | 171 +
.../webservice/algorithms/doms/scatterplot.py | 112 +
.../webservice/algorithms/doms/subsetter.py | 245 +
analysis/webservice/algorithms/doms/values.py | 57 +
.../webservice/algorithms/doms/workerthread.py | 51 +
.../webservice/algorithms_spark/ClimMapSpark.py | 257 +
.../webservice/algorithms_spark/CorrMapSpark.py | 316 ++
.../DailyDifferenceAverageSpark.py | 391 ++
.../algorithms_spark/HofMoellerSpark.py | 331 ++
analysis/webservice/algorithms_spark/Matchup.py | 691 +++
.../algorithms_spark/TimeAvgMapSpark.py | 248 +
.../algorithms_spark/TimeSeriesSpark.py | 554 ++
.../webservice/algorithms_spark/__init__.py | 58 +
analysis/webservice/config/algorithms.ini | 5 +
analysis/webservice/config/web.ini | 11 +
analysis/webservice/matserver.py | 30 +
analysis/webservice/plotting.py | 552 ++
analysis/webservice/webapp.py | 230 +
analysis/webservice/webmodel.py | 519 ++
client/README.md | 0
client/docs/nexuscli/index.html | 1130 ++++
client/docs/nexuscli/nexuscli.m.html | 1687 ++++++
client/docs/nexuscli/test/index.html | 1109 ++++
client/docs/nexuscli/test/nexuscli_test.m.html | 3529 ++++++++++++
client/nexuscli/__init__.py | 9 +
client/nexuscli/nexuscli.py | 198 +
client/nexuscli/test/__init__.py | 4 +
client/nexuscli/test/nexuscli_test.py | 30 +
client/requirements.txt | 14 +
client/setup.py | 36 +
climatology/.gitignore | 11 +
climatology/clim/ClimatologySpark.py | 455 ++
climatology/clim/ClimatologySpark2.py | 636 +++
climatology/clim/README.md | 32 +
climatology/clim/__init__.py | 0
climatology/clim/binsum.f | 64 +
climatology/clim/cache.py | 73 +
climatology/clim/climatology.py | 233 +
climatology/clim/climatology1.py | 233 +
climatology/clim/climatology2.py | 453 ++
climatology/clim/climatology3Spark.py | 418 ++
climatology/clim/cluster.py | 84 +
climatology/clim/cluster2.py | 84 +
climatology/clim/datasets.py | 317 ++
climatology/clim/dparkTest.py | 9 +
climatology/clim/gaussInterp.py | 43 +
climatology/clim/gaussInterp.pyx | 131 +
climatology/clim/gaussInterp_f.f | 219 +
climatology/clim/gaussInterp_f.mk | 1 +
climatology/clim/gaussInterp_slow.py | 130 +
climatology/clim/interp.f | 302 +
climatology/clim/jobClimatology2.py | 22 +
climatology/clim/jobTest.py | 18 +
climatology/clim/orig/C/README | 6 +
climatology/clim/orig/C/binsum.c | 125 +
climatology/clim/orig/C/clouderosion.c | 33 +
climatology/clim/orig/C/gaussinterp.readme | 159 +
climatology/clim/orig/C/gaussinterp_C_code.tar | Bin 0 -> 51200 bytes
climatology/clim/orig/C/interp.c | 448 ++
climatology/clim/orig/C/makefile | 33 +
climatology/clim/orig/C/setupinterp.c | 431 ++
.../clim/orig/Fortran/armstrong_interp_code.tar | Bin 0 -> 30720 bytes
climatology/clim/orig/Fortran/binsum.f | 64 +
climatology/clim/orig/Fortran/interp.f | 302 +
climatology/clim/orig/Fortran/makefile | 46 +
climatology/clim/orig/Fortran/passbase.f | 9 +
climatology/clim/orig/Fortran/setupinterp.f | 291 +
climatology/clim/pixelStats.py | 217 +
climatology/clim/plotlib.py | 843 +++
climatology/clim/reroot.py | 31 +
climatology/clim/setup.py | 8 +
climatology/clim/sort.py | 43 +
climatology/clim/sparkTest.py | 17 +
climatology/clim/spatialFilter.py | 36 +
climatology/clim/spatialFilter_f.f | 121 +
climatology/clim/spatialFilter_f.mk | 1 +
climatology/clim/split.py | 198 +
climatology/clim/test/__init__.py | 0
climatology/clim/test/ccmpTest.py | 19 +
climatology/clim/timePartitions.py | 32 +
climatology/clim/util/__init__.py | 0
climatology/clim/util/array.py | 180 +
climatology/clim/util/introspect.py | 35 +
climatology/clim/util/plot.py | 133 +
climatology/clim/util/stats.py | 218 +
climatology/clim/util/timeJ2000.py | 369 ++
climatology/clim/util/warn.py | 43 +
climatology/clim/util/wls.py | 798 +++
climatology/clim/variables.py | 140 +
climatology/clim/wls.py | 798 +++
climatology/setup.py | 9 +
data-access/.gitignore | 4 +
data-access/README.md | 58 +
.../config/schemas/cassandra/nexustiles.cql | 8 +
.../nexustiles/conf/lang/contractions_ca.txt | 8 +
.../nexustiles/conf/lang/contractions_fr.txt | 15 +
.../nexustiles/conf/lang/contractions_ga.txt | 5 +
.../nexustiles/conf/lang/contractions_it.txt | 23 +
.../nexustiles/conf/lang/hyphenations_ga.txt | 5 +
.../nexustiles/conf/lang/stemdict_nl.txt | 6 +
.../nexustiles/conf/lang/stoptags_ja.txt | 420 ++
.../nexustiles/conf/lang/stopwords_ar.txt | 125 +
.../nexustiles/conf/lang/stopwords_bg.txt | 193 +
.../nexustiles/conf/lang/stopwords_ca.txt | 220 +
.../nexustiles/conf/lang/stopwords_cz.txt | 172 +
.../nexustiles/conf/lang/stopwords_da.txt | 110 +
.../nexustiles/conf/lang/stopwords_de.txt | 294 +
.../nexustiles/conf/lang/stopwords_el.txt | 78 +
.../nexustiles/conf/lang/stopwords_en.txt | 54 +
.../nexustiles/conf/lang/stopwords_es.txt | 356 ++
.../nexustiles/conf/lang/stopwords_eu.txt | 99 +
.../nexustiles/conf/lang/stopwords_fa.txt | 313 ++
.../nexustiles/conf/lang/stopwords_fi.txt | 97 +
.../nexustiles/conf/lang/stopwords_fr.txt | 186 +
.../nexustiles/conf/lang/stopwords_ga.txt | 110 +
.../nexustiles/conf/lang/stopwords_gl.txt | 161 +
.../nexustiles/conf/lang/stopwords_hi.txt | 235 +
.../nexustiles/conf/lang/stopwords_hu.txt | 211 +
.../nexustiles/conf/lang/stopwords_hy.txt | 46 +
.../nexustiles/conf/lang/stopwords_id.txt | 359 ++
.../nexustiles/conf/lang/stopwords_it.txt | 303 +
.../nexustiles/conf/lang/stopwords_ja.txt | 127 +
.../nexustiles/conf/lang/stopwords_lv.txt | 172 +
.../nexustiles/conf/lang/stopwords_nl.txt | 119 +
.../nexustiles/conf/lang/stopwords_no.txt | 194 +
.../nexustiles/conf/lang/stopwords_pt.txt | 253 +
.../nexustiles/conf/lang/stopwords_ro.txt | 233 +
.../nexustiles/conf/lang/stopwords_ru.txt | 243 +
.../nexustiles/conf/lang/stopwords_sv.txt | 133 +
.../nexustiles/conf/lang/stopwords_th.txt | 119 +
.../nexustiles/conf/lang/stopwords_tr.txt | 212 +
.../nexustiles/conf/lang/userdict_ja.txt | 29 +
.../solr-7.1.0/nexustiles/conf/managed-schema | 951 ++++
.../solr-7.1.0/nexustiles/conf/params.json | 20 +
.../solr-7.1.0/nexustiles/conf/protwords.txt | 21 +
.../solr-7.1.0/nexustiles/conf/solrconfig.xml | 1364 +++++
.../solr-7.1.0/nexustiles/conf/stopwords.txt | 14 +
.../solr-7.1.0/nexustiles/conf/synonyms.txt | 29 +
.../solr/dataset/conf/_rest_managed.json | 1 +
.../schemas/solr/dataset/conf/currency.xml | 67 +
.../solr/dataset/conf/lang/stopwords_en.txt | 54 +
.../schemas/solr/dataset/conf/protwords.txt | 21 +
.../config/schemas/solr/dataset/conf/schema.xml | 660 +++
.../schemas/solr/dataset/conf/solrconfig.xml | 583 ++
.../schemas/solr/dataset/conf/stopwords.txt | 14 +
.../schemas/solr/dataset/conf/synonyms.txt | 29 +
.../config/schemas/solr/dataset/core.properties | 0
.../schemas/solr/nexustiles/conf/currency.xml | 67 +
.../schemas/solr/nexustiles/conf/elevate.xml | 42 +
.../nexustiles/conf/lang/contractions_ca.txt | 8 +
.../nexustiles/conf/lang/contractions_fr.txt | 15 +
.../nexustiles/conf/lang/contractions_ga.txt | 5 +
.../nexustiles/conf/lang/contractions_it.txt | 23 +
.../nexustiles/conf/lang/hyphenations_ga.txt | 5 +
.../solr/nexustiles/conf/lang/stemdict_nl.txt | 6 +
.../solr/nexustiles/conf/lang/stoptags_ja.txt | 420 ++
.../solr/nexustiles/conf/lang/stopwords_ar.txt | 125 +
.../solr/nexustiles/conf/lang/stopwords_bg.txt | 193 +
.../solr/nexustiles/conf/lang/stopwords_ca.txt | 220 +
.../solr/nexustiles/conf/lang/stopwords_cz.txt | 172 +
.../solr/nexustiles/conf/lang/stopwords_da.txt | 110 +
.../solr/nexustiles/conf/lang/stopwords_de.txt | 294 +
.../solr/nexustiles/conf/lang/stopwords_el.txt | 78 +
.../solr/nexustiles/conf/lang/stopwords_en.txt | 54 +
.../solr/nexustiles/conf/lang/stopwords_es.txt | 356 ++
.../solr/nexustiles/conf/lang/stopwords_eu.txt | 99 +
.../solr/nexustiles/conf/lang/stopwords_fa.txt | 313 ++
.../solr/nexustiles/conf/lang/stopwords_fi.txt | 97 +
.../solr/nexustiles/conf/lang/stopwords_fr.txt | 186 +
.../solr/nexustiles/conf/lang/stopwords_ga.txt | 110 +
.../solr/nexustiles/conf/lang/stopwords_gl.txt | 161 +
.../solr/nexustiles/conf/lang/stopwords_hi.txt | 235 +
.../solr/nexustiles/conf/lang/stopwords_hu.txt | 211 +
.../solr/nexustiles/conf/lang/stopwords_hy.txt | 46 +
.../solr/nexustiles/conf/lang/stopwords_id.txt | 359 ++
.../solr/nexustiles/conf/lang/stopwords_it.txt | 303 +
.../solr/nexustiles/conf/lang/stopwords_ja.txt | 127 +
.../solr/nexustiles/conf/lang/stopwords_lv.txt | 172 +
.../solr/nexustiles/conf/lang/stopwords_nl.txt | 119 +
.../solr/nexustiles/conf/lang/stopwords_no.txt | 194 +
.../solr/nexustiles/conf/lang/stopwords_pt.txt | 253 +
.../solr/nexustiles/conf/lang/stopwords_ro.txt | 233 +
.../solr/nexustiles/conf/lang/stopwords_ru.txt | 243 +
.../solr/nexustiles/conf/lang/stopwords_sv.txt | 133 +
.../solr/nexustiles/conf/lang/stopwords_th.txt | 119 +
.../solr/nexustiles/conf/lang/stopwords_tr.txt | 212 +
.../solr/nexustiles/conf/lang/userdict_ja.txt | 29 +
.../schemas/solr/nexustiles/conf/managed-schema | 1017 ++++
.../schemas/solr/nexustiles/conf/params.json | 20 +
.../schemas/solr/nexustiles/conf/protwords.txt | 21 +
.../schemas/solr/nexustiles/conf/solrconfig.xml | 1408 +++++
.../schemas/solr/nexustiles/conf/stopwords.txt | 14 +
.../schemas/solr/nexustiles/conf/synonyms.txt | 29 +
.../schemas/solr/nexustiles/core.properties | 0
data-access/nexustiles/__init__.py | 0
data-access/nexustiles/config/datastores.ini | 9 +
data-access/nexustiles/dao/CassandraProxy.pyx | 146 +
data-access/nexustiles/dao/SolrProxy.pyx | 567 ++
data-access/nexustiles/dao/__init__.py | 0
data-access/nexustiles/model/__init__.py | 0
data-access/nexustiles/model/nexusmodel.py | 250 +
data-access/nexustiles/nexustiles.py | 366 ++
data-access/requirements.txt | 41 +
data-access/setup.py | 41 +
data-access/tests/__init__.py | 4 +
data-access/tests/config/datastores.ini | 9 +
data-access/tests/nexusmodel_test.py | 370 ++
data-access/tests/nexustiles_test.py | 92 +
data-access/tests/sizefromcass.py | 22 +
data-access/tests/solr_update.py | 23 +
data-access/tests/solrproxy_test.py | 62 +
docker/.gitignore | 1 +
docker/README.md | 1 +
docker/cassandra/Dockerfile | 5 +
docker/cassandra/README.md | 0
docker/ingest-admin/Dockerfile | 12 +
docker/ingest-admin/README.md | 86 +
docker/ingest-admin/docker-compose.yml | 73 +
docker/ingest-admin/nx-deploy-stream.sh | 38 +
docker/ingest-admin/nx-env.sh | 5 +
docker/ingest-base/Dockerfile | 62 +
docker/ingest-base/README.md | 3 +
docker/ingest-base/install-custom-software.sh | 82 +
docker/ingest-base/ivy_settings.xml | 31 +
docker/ingest-base/maven_settings.xml | 256 +
docker/ingest-base/nexus-ingest.sh | 115 +
docker/ingest-base/stream-definitions | 17 +
docker/ingest-base/xd-container-logback.groovy | 83 +
docker/ingest-base/xd-singlenode-logback.groovy | 91 +
docker/ingest-container/Dockerfile | 5 +
docker/ingest-container/README.md | 74 +
docker/ingest-container/docker-compose.yml | 44 +
docker/ingest-singlenode/Dockerfile | 5 +
docker/ingest-singlenode/README.md | 27 +
docker/kafka/Dockerfile | 26 +
docker/kafka/README.md | 19 +
docker/kafka/docker-compose.yml | 53 +
docker/kafka/kafka.properties | 131 +
docker/nexus-cluster.yml | 251 +
docker/nexus-webapp/Dockerfile | 20 +
docker/nexus-webapp/docker-entrypoint.sh | 13 +
docker/nexusbase/Dockerfile | 36 +
docker/solr-single-node/Dockerfile | 14 +
docker/solr-single-node/README.md | 9 +
docker/solr/Dockerfile | 20 +
docker/solr/README.md | 0
docker/spark-mesos-agent/Dockerfile | 13 +
docker/spark-mesos-agent/docker-entrypoint.sh | 13 +
docker/spark-mesos-base/Dockerfile | 109 +
docker/spark-mesos-base/install_mesos.sh | 49 +
docker/spark-mesos-master/Dockerfile | 13 +
docker/zookeeper/Dockerfile | 27 +
docker/zookeeper/README.md | 0
esip-workshop/README.md | 2 +
.../docker/analysis/docker-compose.yml | 93 +
esip-workshop/docker/infrastructure/.env | 1 +
.../docker/infrastructure/docker-compose.yml | 172 +
.../infrastructure/solr-zk-custom-init.sh | 15 +
esip-workshop/docker/ingest/docker-compose.yml | 192 +
.../docker/ingest/stream-definitions.txt | 13 +
esip-workshop/docker/jupyter/Dockerfile | 22 +
esip-workshop/docker/jupyter/requirements.txt | 4 +
esip-workshop/ec2/startup.sh | 31 +
.../Nexus Deployment and Ingestion.ipynb | 198 +
.../other-notebooks/Nexus Monitoring.ipynb | 1159 ++++
.../workshop1/1 - Introduction.ipynb | 75 +
.../workshop1/2 - Running Code.ipynb | 271 +
.../workshop1/3 - Python Basics.ipynb | 1111 ++++
.../workshop1/4 - Student Exercise.ipynb | 479 ++
.../5 - Student Exercise - Answers.ipynb | 690 +++
.../workshop2/1- Introduction.ipynb | 62 +
.../workshop2/2 - Infrastructure.ipynb | 160 +
.../workshop2/3 - Analysis.ipynb | 218 +
.../workshop2/4 - Ingestion.ipynb | 260 +
.../workshop2/img/ec2-containers-analysis.png | Bin 0 -> 83031 bytes
.../img/ec2-containers-infrastructure.png | Bin 0 -> 60123 bytes
.../workshop2/img/ec2-containers.png | Bin 0 -> 112006 bytes
nexus-ingest/.gitignore | 4 +
nexus-ingest/README.md | 3 +
nexus-ingest/dataset-tiler/.gitignore | 28 +
nexus-ingest/dataset-tiler/README.md | 11 +
nexus-ingest/dataset-tiler/build.gradle | 89 +
.../reports/license/dependency-license.html | 1532 +++++
.../reports/license/dependency-license.xml | 696 +++
.../reports/license/license-dependency.html | 581 ++
.../reports/license/license-dependency.xml | 257 +
.../build/reports/project/dependencies.txt | 4563 +++++++++++++++
.../project/dependencies/css/base-style.css | 179 +
.../reports/project/dependencies/css/style.css | 84 +
.../reports/project/dependencies/css/tree.css | 102 +
.../reports/project/dependencies/images/d.gif | Bin 0 -> 2944 bytes
.../reports/project/dependencies/images/d.png | Bin 0 -> 7635 bytes
.../project/dependencies/images/throbber.gif | Bin 0 -> 1849 bytes
.../reports/project/dependencies/index.html | 44 +
.../project/dependencies/js/jquery.jstree.js | 4564 +++++++++++++++
.../dependencies/js/jquery.min-1.11.0.js | 4 +
.../reports/project/dependencies/js/script.js | 202 +
.../reports/project/dependencies/root.html | 40 +
.../build/reports/project/dependencies/root.js | 1 +
.../build/reports/project/properties.txt | 151 +
.../build/reports/project/tasks.txt | 82 +
.../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 53636 bytes
.../gradle/wrapper/gradle-wrapper.properties | 6 +
nexus-ingest/dataset-tiler/gradlew | 160 +
.../nexus/ingest/datatiler/FileSlicer.groovy | 14 +
.../datatiler/SliceFileByDimension.groovy | 101 +
.../datatiler/SliceFileByTilesDesired.groovy | 80 +
.../datatiler/DataTilerOptionsMetadata.java | 81 +
.../datatiler/IntegrationConfiguration.java | 112 +
.../resources/config/spring-module.properties | 2 +
.../DatasetTilerIntegrationTest.groovy | 181 +
.../datatiler/DatasetTilerPropertiesTest.groovy | 43 +
.../datatiler/SliceFileByDimensionTest.groovy | 49 +
.../SliceFileByTilesDesiredTest.groovy | 33 +
...4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc.nc4 | Bin 0 -> 1037357 bytes
...nd_Analysis_20160101_V02.0_L3.0_RSS.split.nc | Bin 0 -> 206870 bytes
...2B_SSS_00865_20150331T163144_R13080.split.h5 | Bin 0 -> 3000192 bytes
...02100_metopb_02676_eps_o_coa_2101_ovw.l2.nc4 | Bin 0 -> 38215 bytes
nexus-ingest/developer-box/.gitignore | 8 +
nexus-ingest/developer-box/README.md | 103 +
nexus-ingest/developer-box/Vagrantfile | 74 +
nexus-ingest/developer-box/bootstrap.sh | 61 +
nexus-ingest/developer-box/cassandra-ddl.sql | 6 +
.../developer-box/data/ascatb/dataseturl.txt | 1 +
.../developer-box/data/avhrr/dataseturl.txt | 1 +
.../developer-box/data/ccmp/dataseturl.txt | 1 +
.../developer-box/data/grace/dataseturl.txt | 1 +
.../data/measures_alt/dataseturl.txt | 1 +
.../developer-box/data/modis/dataseturl.txt | 1 +
.../data/modis_aqua_chl/dataseturl.txt | 1 +
.../developer-box/data/mur/dataseturl.txt | 1 +
.../developer-box/data/smap/dataseturl.txt | 1 +
.../developer-box/data/smapl3/dataseturl.txt | 1 +
.../developer-box/data/trmm/dataseturl.txt | 0
.../developer-box/solr-nexustiles-core.zip | Bin 0 -> 92000 bytes
.../developer-box/start-singlenode-at-boot.sh | 4 +
nexus-ingest/developer-box/stream-definitions | 64 +
nexus-ingest/developer-box/user-init.sh | 17 +
nexus-ingest/developer-box/user-update.sh | 86 +
.../developer-box/xd-singlenode-logback.groovy | 91 +
nexus-ingest/groovy-scripts/.gitignore | 4 +
nexus-ingest/groovy-scripts/README.md | 3 +
.../add-day-of-year-attribute.groovy | 48 +
.../add-time-from-granulename.groovy | 63 +
.../add-time-to-spatial-spec.groovy | 34 +
nexus-ingest/groovy-scripts/dev-install.sh | 7 +
.../groovy-scripts/generate-tile-id.groovy | 37 +
.../groovy-scripts/nexustile-to-string.groovy | 23 +
.../groovy-scripts/set-dataset-name.groovy | 28 +
.../test/TestAddTimeToSpatialSpec.groovy | 82 +
nexus-ingest/nexus-messages/.gitignore | 35 +
nexus-ingest/nexus-messages/README.md | 24 +
nexus-ingest/nexus-messages/build.gradle | 195 +
.../reports/license/dependency-license.html | 62 +
.../reports/license/dependency-license.xml | 6 +
.../reports/license/license-dependency.html | 69 +
.../reports/license/license-dependency.xml | 5 +
.../build/reports/project/dependencies.txt | 43 +
.../project/dependencies/css/base-style.css | 179 +
.../reports/project/dependencies/css/style.css | 84 +
.../reports/project/dependencies/css/tree.css | 102 +
.../reports/project/dependencies/images/d.gif | Bin 0 -> 2944 bytes
.../reports/project/dependencies/images/d.png | Bin 0 -> 7635 bytes
.../project/dependencies/images/throbber.gif | Bin 0 -> 1849 bytes
.../reports/project/dependencies/index.html | 44 +
.../project/dependencies/js/jquery.jstree.js | 4564 +++++++++++++++
.../dependencies/js/jquery.min-1.11.0.js | 4 +
.../reports/project/dependencies/js/script.js | 202 +
.../reports/project/dependencies/root.html | 40 +
.../build/reports/project/dependencies/root.js | 1 +
.../build/reports/project/properties.txt | 128 +
.../build/reports/project/tasks.txt | 68 +
.../nexus-messages/example.gradle.properties | 7 +
.../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 53639 bytes
.../gradle/wrapper/gradle-wrapper.properties | 6 +
nexus-ingest/nexus-messages/gradlew | 160 +
.../src/main/proto/NexusContent.proto | 99 +
.../src/main/python/nexusproto/__init__.py | 0
.../src/main/python/nexusproto/serialization.py | 40 +
.../src/main/python/nexusproto/setup.py | 31 +
nexus-ingest/nexus-sink/.gitignore | 16 +
nexus-ingest/nexus-sink/README.md | 11 +
nexus-ingest/nexus-sink/build.gradle | 176 +
.../reports/license/dependency-license.html | 1506 +++++
.../reports/license/dependency-license.xml | 680 +++
.../reports/license/license-dependency.html | 557 ++
.../reports/license/license-dependency.xml | 249 +
.../build/reports/project/dependencies.txt | 5266 ++++++++++++++++++
.../project/dependencies/css/base-style.css | 179 +
.../reports/project/dependencies/css/style.css | 84 +
.../reports/project/dependencies/css/tree.css | 102 +
.../reports/project/dependencies/images/d.gif | Bin 0 -> 2944 bytes
.../reports/project/dependencies/images/d.png | Bin 0 -> 7635 bytes
.../project/dependencies/images/throbber.gif | Bin 0 -> 1849 bytes
.../reports/project/dependencies/index.html | 44 +
.../project/dependencies/js/jquery.jstree.js | 4564 +++++++++++++++
.../dependencies/js/jquery.min-1.11.0.js | 4 +
.../reports/project/dependencies/js/script.js | 202 +
.../reports/project/dependencies/root.html | 40 +
.../build/reports/project/dependencies/root.js | 1 +
.../build/reports/project/properties.txt | 156 +
.../nexus-sink/build/reports/project/tasks.txt | 89 +
.../nexus-sink/example.gradle.properties | 5 +
.../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 53639 bytes
.../gradle/wrapper/gradle-wrapper.properties | 6 +
nexus-ingest/nexus-sink/gradlew | 160 +
nexus-ingest/nexus-sink/settings.gradle | 2 +
.../nexus/ingest/nexussink/NexusService.groovy | 166 +
.../nexussink/InfrastructureConfiguration.java | 127 +
.../nexussink/IntegrationConfiguration.java | 136 +
.../nexussink/NexusSinkOptionsMetadata.java | 135 +
.../resources/config/spring-module.properties | 2 +
.../nexussink/NexusServiceUnitTest.groovy | 65 +
.../nexussink/NexusSinkIntegrationTest.groovy | 685 +++
.../NexusSinkOptionsIntegrationTest.java | 50 +
.../TestInfrastructureConfiguration.java | 52 +
.../nexus-sink/src/test/resources/init-db.cql | 6 +
.../log4j-embedded-cassandra.properties | 17 +
.../src/test/resources/logback-test.xml | 20 +
.../resources/solr/nexustiles/conf/currency.xml | 67 +
.../resources/solr/nexustiles/conf/elevate.xml | 38 +
.../nexustiles/conf/lang/contractions_ca.txt | 8 +
.../nexustiles/conf/lang/contractions_fr.txt | 15 +
.../nexustiles/conf/lang/contractions_ga.txt | 5 +
.../nexustiles/conf/lang/contractions_it.txt | 23 +
.../nexustiles/conf/lang/hyphenations_ga.txt | 5 +
.../solr/nexustiles/conf/lang/stemdict_nl.txt | 6 +
.../solr/nexustiles/conf/lang/stoptags_ja.txt | 420 ++
.../solr/nexustiles/conf/lang/stopwords_ar.txt | 125 +
.../solr/nexustiles/conf/lang/stopwords_bg.txt | 193 +
.../solr/nexustiles/conf/lang/stopwords_ca.txt | 220 +
.../solr/nexustiles/conf/lang/stopwords_cz.txt | 172 +
.../solr/nexustiles/conf/lang/stopwords_da.txt | 110 +
.../solr/nexustiles/conf/lang/stopwords_de.txt | 294 +
.../solr/nexustiles/conf/lang/stopwords_el.txt | 78 +
.../solr/nexustiles/conf/lang/stopwords_en.txt | 54 +
.../solr/nexustiles/conf/lang/stopwords_es.txt | 356 ++
.../solr/nexustiles/conf/lang/stopwords_eu.txt | 99 +
.../solr/nexustiles/conf/lang/stopwords_fa.txt | 313 ++
.../solr/nexustiles/conf/lang/stopwords_fi.txt | 97 +
.../solr/nexustiles/conf/lang/stopwords_fr.txt | 186 +
.../solr/nexustiles/conf/lang/stopwords_ga.txt | 110 +
.../solr/nexustiles/conf/lang/stopwords_gl.txt | 161 +
.../solr/nexustiles/conf/lang/stopwords_hi.txt | 235 +
.../solr/nexustiles/conf/lang/stopwords_hu.txt | 211 +
.../solr/nexustiles/conf/lang/stopwords_hy.txt | 46 +
.../solr/nexustiles/conf/lang/stopwords_id.txt | 359 ++
.../solr/nexustiles/conf/lang/stopwords_it.txt | 303 +
.../solr/nexustiles/conf/lang/stopwords_ja.txt | 127 +
.../solr/nexustiles/conf/lang/stopwords_lv.txt | 172 +
.../solr/nexustiles/conf/lang/stopwords_nl.txt | 119 +
.../solr/nexustiles/conf/lang/stopwords_no.txt | 194 +
.../solr/nexustiles/conf/lang/stopwords_pt.txt | 253 +
.../solr/nexustiles/conf/lang/stopwords_ro.txt | 233 +
.../solr/nexustiles/conf/lang/stopwords_ru.txt | 243 +
.../solr/nexustiles/conf/lang/stopwords_sv.txt | 133 +
.../solr/nexustiles/conf/lang/stopwords_th.txt | 119 +
.../solr/nexustiles/conf/lang/stopwords_tr.txt | 212 +
.../solr/nexustiles/conf/lang/userdict_ja.txt | 29 +
.../solr/nexustiles/conf/managed-schema | 489 ++
.../resources/solr/nexustiles/conf/params.json | 20 +
.../solr/nexustiles/conf/protwords.txt | 21 +
.../solr/nexustiles/conf/solrconfig.xml | 1578 ++++++
.../solr/nexustiles/conf/stopwords.txt | 14 +
.../resources/solr/nexustiles/conf/synonyms.txt | 29 +
.../resources/solr/nexustiles/core.properties | 0
.../nexus-sink/src/test/resources/solr/solr.xml | 53 +
.../src/test/resources/spring-cassandra.yaml | 630 +++
nexus-ingest/nexus-xd-python-modules/.gitignore | 9 +
nexus-ingest/nexus-xd-python-modules/README.md | 5 +
.../nexus-xd-python-modules/nexusxd/__init__.py | 4 +
.../nexusxd/callncpdq.py | 58 +
.../nexus-xd-python-modules/nexusxd/callncra.py | 54 +
.../nexusxd/computespeeddirfromuv.py | 78 +
.../nexusxd/emptytilefilter.py | 37 +
.../nexusxd/kelvintocelsius.py | 30 +
.../nexusxd/normalizetimebeginningofmonth.py | 39 +
.../nexusxd/processorchain.py | 43 +
.../nexusxd/regrid1x1.py | 134 +
.../nexusxd/subtract180longitude.py | 39 +
.../nexusxd/tilereadingprocessor.py | 239 +
.../nexusxd/tilesumarizingprocessor.py | 105 +
.../nexusxd/winddirspeedtouv.py | 96 +
.../nexus-xd-python-modules/package-list.txt | 31 +
.../nexus-xd-python-modules/requirements.txt | 21 +
nexus-ingest/nexus-xd-python-modules/setup.py | 34 +
.../nexus-xd-python-modules/tests/__init__.py | 4 +
.../tests/callncpdq_test.py | 60 +
.../nexus-xd-python-modules/tests/checker.py | 95 +
.../tests/computespeeddirfromuv_test.py | 140 +
.../tests/convert_iceshelf.py | 77 +
.../tests/datafiles/empty_mur.nc4 | Bin 0 -> 60937 bytes
.../tests/datafiles/not_empty_ascatb.nc4 | Bin 0 -> 78036 bytes
.../tests/datafiles/not_empty_avhrr.nc4 | Bin 0 -> 49511 bytes
.../tests/datafiles/not_empty_ccmp.nc | Bin 0 -> 206870 bytes
.../tests/datafiles/not_empty_measures_alt.nc | Bin 0 -> 45477 bytes
.../tests/datafiles/not_empty_mur.nc4 | Bin 0 -> 60907 bytes
.../tests/datafiles/not_empty_smap.h5 | Bin 0 -> 3000192 bytes
.../tests/datafiles/partial_empty_mur.nc4 | Bin 0 -> 84738 bytes
.../ascat_longitude_more_than_180.bin | Bin 0 -> 3858 bytes
.../ascatb_nonempty_nexustile.bin | Bin 0 -> 3515 bytes
.../avhrr_nonempty_nexustile.bin | Bin 0 -> 892 bytes
.../ccmp_nonempty_nexustile.bin | Bin 0 -> 27427 bytes
.../smap_nonempty_nexustile.bin | Bin 0 -> 1374 bytes
.../tests/hd5splitter.py | 123 +
.../tests/kelvintocelsius_test.py | 47 +
.../tests/processorchain_test.py | 87 +
.../tests/regrid1x1_test.py | 86 +
.../tests/subtract180longitude_test.py | 62 +
.../tests/tilereadingprocessor_test.py | 431 ++
.../tests/tilesumarizingprocessor_test.py | 89 +
.../tests/winddirspeedtouv_test.py | 108 +
nexus-ingest/spring-xd-python/.gitignore | 11 +
nexus-ingest/spring-xd-python/README.md | 7 +
nexus-ingest/spring-xd-python/setup.py | 28 +
.../spring-xd-python/springxd/__init__.py | 0
.../spring-xd-python/springxd/tcpstream.py | 143 +
nexus-ingest/tcp-shell/.gitignore | 28 +
nexus-ingest/tcp-shell/README.md | 11 +
nexus-ingest/tcp-shell/build.gradle | 72 +
.../reports/license/dependency-license.html | 1420 +++++
.../reports/license/dependency-license.xml | 636 +++
.../reports/license/license-dependency.html | 526 ++
.../reports/license/license-dependency.xml | 234 +
.../build/reports/project/dependencies.txt | 4080 ++++++++++++++
.../project/dependencies/css/base-style.css | 179 +
.../reports/project/dependencies/css/style.css | 84 +
.../reports/project/dependencies/css/tree.css | 102 +
.../reports/project/dependencies/images/d.gif | Bin 0 -> 2944 bytes
.../reports/project/dependencies/images/d.png | Bin 0 -> 7635 bytes
.../project/dependencies/images/throbber.gif | Bin 0 -> 1849 bytes
.../reports/project/dependencies/index.html | 44 +
.../project/dependencies/js/jquery.jstree.js | 4564 +++++++++++++++
.../dependencies/js/jquery.min-1.11.0.js | 4 +
.../reports/project/dependencies/js/script.js | 202 +
.../reports/project/dependencies/root.html | 40 +
.../build/reports/project/dependencies/root.js | 1 +
.../build/reports/project/properties.txt | 144 +
.../tcp-shell/build/reports/project/tasks.txt | 80 +
.../tcp-shell/gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 53639 bytes
.../gradle/wrapper/gradle-wrapper.properties | 6 +
nexus-ingest/tcp-shell/gradlew | 160 +
.../org/nasa/ingest/tcpshell/PortResolver.java | 27 +
.../nasa/ingest/tcpshell/ShellWordsParser.java | 85 +
.../nasa/ingest/tcpshell/TcpShellCommand.java | 379 ++
.../config/IntegrationConfiguration.java | 212 +
.../config/TcpShellModuleOptionsMetadata.java | 132 +
.../main/resources/config/tcpshell.properties | 3 +
.../ingest/tcpshell/PythonAvailableRule.java | 40 +
.../TcpShellCommandIntegrationTests.java | 212 +
.../ingest/tcpshell/TcpShellCommandTests.java | 45 +
.../tcp-shell/src/test/resources/echo.py | 11 +
.../tcp-shell/src/test/resources/echoenv.py | 12 +
...4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc.nc4 | Bin 0 -> 1037357 bytes
.../tcp-shell/src/test/resources/onetomany.py | 12 +
.../tcp-shell/src/test/resources/tcpstream.py | 139 +
.../src/test/resources/tilereadingprocessor.py | 220 +
tools/deletebyquery/.gitignore | 2 +
tools/deletebyquery/deletebyquery.py | 231 +
tools/deletebyquery/requirements.txt | 22 +
614 files changed, 120666 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/LICENSE
----------------------------------------------------------------------
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..8dada3e
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright {yyyy} {name of copyright owner}
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..cc6ea8e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,118 @@
+nexus
+=====
+
+The next generation cloud-based science data service platform
+
+# Developer Installation
+
+1. Follow instructions for installing [nexusproto](nexus-ingest/nexus-messages/README.md)
+2. Follow instructions for installing [data-access](data-access/README.md)
+3. Follow instructions for setting up [nexus-ingest](nexus-ingest/developer-box/README.md)
+ 1. [Ingest some data](nexus-ingest/developer-box/README.md#ingesting-data)
+4. Follow instructions for running [analysis](analysis/README.md)
+
+
+# Installation Instructions
+
+## Zookeeper Setup
+
+1. Install and configure Apache Zookeeper 3.4.x
+2. Create chroots `solr`, `xd`, and `kafka`
+
+## Database Setup
+
+### Apache Solr
+
+1. Install and configure [Apache Solr Cloud 5.3.x](http://archive.apache.org/dist/lucene/solr/)
+2. Download [JTS Topology Suite v1.13](https://sourceforge.net/projects/jts-topo-suite/files/jts/1.13/) and extract the zip.
+3. From the exploded JTS zip, copy `$JTS_ZIP/lib/jts-1.13.jar` and `$JTS_ZIP/lib/jtsio-1.13.jar` into `$SOLR_INSTALL_DIR/server/lib/ext` on all Solr nodes.
+4. Configure Solr Cloud to use the `/solr` chroot of zookeeper
+5. On one of the Solr cloud nodes, upload the `nexustiles` configuration (located in [data-access/config/schemas/solr](data-access/config/schemas/solr)) as a configset
+
+ ````
+ ./zkcli.sh -cmd upconfig -z $ZK_SERVERS/solr -confname nexustiles -confdir /path/to/nexustiles/conf
+ ````
+
+6. [Create a new collection](https://cwiki.apache.org/confluence/display/solr/Collections+API#CollectionsAPI-api1) with the name nexustiles. Use the nexustiles configset uploaded previously.
+
+ ````
+ curl "http://<SOLR_HOST>/solr/admin/collections?action=CREATE&name=nexustiles&collection.configName=nexustiles"
+ ````
+7. Repeat steps 5 & 6 for the [datasets](data-access/config/schemas/solr) collection.
+
+
+### Apache Cassandra
+
+1. Install and configure [Apache Cassandra 2.2.x](http://cassandra.apache.org/download/)
+2. Execute the DDL located in [nexustiles.cql](data-access/config/schemas/cassandra/nexustiles.cql)
+
+### HSQLDB
+
+1. Install and run [HSQLDB 2.3.x](http://hsqldb.org/)
+
+### Redis
+
+1. Install and run [Redis 3.0.x](http://redis.io/)
+
+## Apache Kafka
+
+1. Install and configure [Apache Kafka 2.11-0.9.0.1](http://kafka.apache.org/)
+2. Configure Kafka to use the `/kafka` chroot of zookeeper
+
+## Spring XD
+
+1. Install [Spring XD 1.3.1.RELEASE](http://docs.spring.io/spring-xd/docs/1.3.1.RELEASE/reference/html/)
+2. Optionally install [Flo for Spring XD](https://docs.pivotal.io/spring-flo/installing-flo.html)
+3. Configure to use previously installed Apache Kafka as messaging bus
+4. Configure to use previously installed Redis for analytics
+5. Configure to use previously installed HSQLDB for Job Repository
+6. Configure to use previously installed Apache Zookeeper using the `xd` chroot
+7. Choose and configure a location for the [custom module registry](http://docs.spring.io/spring-xd/docs/current/reference/html/#_the_module_registry)
+8. Create a directory called `none` in `$SPRING_XD_HOME/xd/lib`
+
+### Preparing the XD Containers
+
+In order for streams to use the custom python and groovy scripts found in nexus-ingest, the machines that will be running Spring XD containers need to have some software installed on them.
+
+#### Anaconda
+
+1. Install [Anaconda 4.0.0](https://www.continuum.io/downloads) with Python 2.7
+2. Create an Anaconda environment
+
+ ````
+ conda create --name nexus-xd-python-modules python
+ ````
+
+3. Install conda dependencies
+
+ ````
+ conda install libnetcdf
+ conda install netcdf4
+ conda install numpy
+ ````
+
+4. Install [nexusproto](nexus-ingest/nexus-messages)
+5. Install [spring-xd-python](nexus-ingest/spring-xd-python)
+6. Install [nexus-xd-python-modules](nexus-ingest/nexus-xd-python-modules)
+
+#### Java 8
+
+1. Install Java 8 JDK. Either Oracle or OpenJDK.
+
+#### nexus-messages
+
+1. The [nexus-messages](nexus-ingest/nexus-messages) JAR needs to be available via Maven. There are a number of ways to do this. One way is to explicitly install the dependency into the local maven cache on each XD Container node.
+
+ ````
+ mvn install:install-file -DpomFile=nexus-messages-VERSION.xml -Dfile=nexus-messages-VERSION.jar
+ ````
+
+#### Groovy Scripts
+
+1. Place the [groovy-scripts](nexus-ingest/groovy-scripts) in a location that will be accessible to all XD Container nodes
+
+#### Custom Modules
+
+1. Upload the [dataset-tiler](nexus-ingest/dataset-tiler) custom module
+2. Upload the [tcpshell](nexus-ingest/tcp-shell) custom module
+3. Upload the [nexus-sink](nexus-ingest/nexus-sink) custom module
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/.DS_Store
----------------------------------------------------------------------
diff --git a/analysis/.DS_Store b/analysis/.DS_Store
new file mode 100644
index 0000000..ec4f52c
Binary files /dev/null and b/analysis/.DS_Store differ
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/.gitignore
----------------------------------------------------------------------
diff --git a/analysis/.gitignore b/analysis/.gitignore
new file mode 100644
index 0000000..743ca99
--- /dev/null
+++ b/analysis/.gitignore
@@ -0,0 +1,6 @@
+build
+dist
+*.egg-info
+*.c
+*.so
+*.nc
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/README.md
----------------------------------------------------------------------
diff --git a/analysis/README.md b/analysis/README.md
new file mode 100644
index 0000000..3607682
--- /dev/null
+++ b/analysis/README.md
@@ -0,0 +1,25 @@
+analysis
+=====
+
+Python module that exposes NEXUS analytical capabilities via an HTTP webservice. Accessible endpoints are described on the [API](https://github.com/dataplumber/nexus/wiki/API) page of the wiki.
+
+# Developer Setup
+
+**NOTE** This project has a dependency on [data-access](https://github.jpl.nasa.gov/thuang/nexus/tree/master/data-access). Make sure data-access is installed in the same environment you will be using for this module.
+
+1. Setup a separate conda env or activate an existing one
+
+ ````
+ conda create --name nexus-analysis python
+ source activate nexus-analysis
+ ````
+
+2. Install conda dependencies
+
+ ````
+ conda install numpy matplotlib mpld3 scipy netCDF4 basemap gdal pyproj=1.9.5.1 libnetcdf=4.3.3.1
+ ````
+
+3. Run `python setup.py install`
+
+4. Launch `python webservice/webapp.py`
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/package-list.txt
----------------------------------------------------------------------
diff --git a/analysis/package-list.txt b/analysis/package-list.txt
new file mode 100644
index 0000000..63bba2c
--- /dev/null
+++ b/analysis/package-list.txt
@@ -0,0 +1,52 @@
+# This file may be used to create an environment using:
+# $ conda create --name <env> --file <this file>
+# platform: osx-64
+basemap=1.0.7=np111py27_0
+configobj=5.0.6=py27_0
+curl=7.45.0=0
+cycler=0.10.0=py27_0
+freetype=2.5.5=1
+gdal=2.0.0=np111py27_3
+geos=3.4.2=0
+geotiff=1.4.1=0
+hdf4=4.2.11=0
+hdf5=1.8.16=0
+jbig=2.1=0
+jinja2=2.8=py27_1
+jpeg=8d=1
+kealib=1.4.5=2
+libgdal=2.0.0=4
+libnetcdf=4.4.0=1
+libpng=1.6.22=0
+libtiff=4.0.6=2
+markupsafe=0.23=py27_2
+matplotlib=1.5.1=np111py27_0
+mkl=11.3.3=0
+mpld3=0.2=py27_0
+netcdf4=1.2.4=np111py27_0
+numpy=1.11.2=py27_0
+openssl=1.0.2j=0
+pip=9.0.1=py27_1
+proj4=4.9.2=0
+pyface=4.5.2=py27_0
+pygments=2.1.3=py27_0
+pyparsing=2.1.4=py27_0
+pyproj=1.9.5.1=py27_0
+pyqt=4.11.4=py27_4
+python=2.7.12=1
+python-dateutil=2.5.3=py27_0
+pytz=2016.6.1=py27_0
+qt=4.8.7=4
+readline=6.2=2
+scipy=0.17.1=np111py27_0
+setuptools=27.2.0=py27_0
+sip=4.18=py27_0
+six=1.10.0=py27_0
+sqlite=3.13.0=0
+tk=8.5.18=0
+traits=4.5.0=py27_0
+traitsui=4.5.1=py27_0
+wheel=0.29.0=py27_0
+xerces-c=3.1.4=0
+xz=5.2.2=0
+zlib=1.2.8=3
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/requirements.txt
----------------------------------------------------------------------
diff --git a/analysis/requirements.txt b/analysis/requirements.txt
new file mode 100644
index 0000000..e23d607
--- /dev/null
+++ b/analysis/requirements.txt
@@ -0,0 +1,51 @@
+awscli==1.11.141
+backports-abc==0.4
+backports.functools-lru-cache==1.3
+backports.ssl-match-hostname==3.5.0.1
+basemap==1.0.7
+botocore==1.6.8
+Cartopy==0.14.2
+cassandra-driver==3.7.1
+certifi==2016.2.28
+colorama==0.3.7
+configobj==5.0.6
+cycler==0.10.0
+Cython==0.24
+descartes==1.0.2
+docutils==0.14
+funcsigs==1.0.2
+futures==3.1.1
+GDAL==2.0.0
+Jinja2==2.8
+jmespath==0.9.3
+MarkupSafe==0.23
+matplotlib==1.5.1
+mock==2.0.0
+mpld3==0.2
+netCDF4==1.2.4
+nexus-data-access==0.32
+nexusproto==0.3
+numpy==1.11.2
+pbr==1.10.0
+protobuf==2.6.1
+pyasn1==0.3.3
+pyface==4.5.2
+Pygments==2.1.3
+pyparsing==2.1.4
+pyproj==1.9.5.1
+pyshp==1.2.10
+python-dateutil==2.6.1
+pytz==2016.6.1
+PyYAML==3.12
+requests==2.11.0
+rsa==3.4.2
+s3transfer==0.1.10
+scipy==0.17.1
+Shapely==1.5.16
+singledispatch==3.4.0.3
+six==1.10.0
+solrpy==0.9.7
+tornado==4.3
+traits==4.5.0
+traitsui==4.5.1
+utm==0.4.1
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/setup.py
----------------------------------------------------------------------
diff --git a/analysis/setup.py b/analysis/setup.py
new file mode 100644
index 0000000..93449e1
--- /dev/null
+++ b/analysis/setup.py
@@ -0,0 +1,46 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
+import setuptools
+
+__version__ = '1.5'
+
+setuptools.setup(
+ name="nexusanalysis",
+ version=__version__,
+ url="https://github.jpl.nasa.gov/thuang/nexus",
+
+ author="Team Nexus",
+
+ description="NEXUS API.",
+ long_description=open('README.md').read(),
+
+ packages=['webservice', 'webservice.algorithms', 'webservice.algorithms.doms', 'webservice.algorithms_spark'],
+ package_data={'webservice': ['config/web.ini', 'config/algorithms.ini'],
+ 'webservice.algorithms.doms': ['domsconfig.ini']},
+ data_files=[
+ ('static', ['static/index.html'])
+ ],
+ platforms='any',
+
+ install_requires=[
+ 'nexus-data-access',
+ 'tornado',
+ 'singledispatch',
+ 'pytz',
+ 'cython',
+ 'requests',
+ 'utm',
+ 'shapely',
+ 'mock',
+ 'backports.functools-lru-cache==1.3'
+ ],
+
+ classifiers=[
+ 'Development Status :: 1 - Pre-Alpha',
+ 'Intended Audience :: Developers',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2.7',
+ ]
+)
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/static/index.html
----------------------------------------------------------------------
diff --git a/analysis/static/index.html b/analysis/static/index.html
new file mode 100644
index 0000000..5ef648a
--- /dev/null
+++ b/analysis/static/index.html
@@ -0,0 +1 @@
+It works!
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/test.py
----------------------------------------------------------------------
diff --git a/analysis/test.py b/analysis/test.py
new file mode 100644
index 0000000..85a3ef1
--- /dev/null
+++ b/analysis/test.py
@@ -0,0 +1,13 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
+import numpy as np
+
+A = np.arange(12).reshape(3, 4)
+b = np.arange(3).reshape(1, 3)
+
+# np.linalg.lstsq(A,b)
+# This gives "LinAlgError: Incompatible dimensions" exception
+
+print np.linalg.lstsq(A, b.T)
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/tests/__init__.py
----------------------------------------------------------------------
diff --git a/analysis/tests/__init__.py b/analysis/tests/__init__.py
new file mode 100644
index 0000000..bd9282c
--- /dev/null
+++ b/analysis/tests/__init__.py
@@ -0,0 +1,4 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/tests/algorithms/StandardDeviationSearch_test.py
----------------------------------------------------------------------
diff --git a/analysis/tests/algorithms/StandardDeviationSearch_test.py b/analysis/tests/algorithms/StandardDeviationSearch_test.py
new file mode 100644
index 0000000..8ba9796
--- /dev/null
+++ b/analysis/tests/algorithms/StandardDeviationSearch_test.py
@@ -0,0 +1,212 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
+import json
+import unittest
+import urllib
+from multiprocessing.pool import ThreadPool
+from unittest import skip
+
+import numpy as np
+from mock import Mock
+from nexustiles.model.nexusmodel import Tile, BBox
+from nexustiles.nexustiles import NexusTileService
+from tornado.testing import AsyncHTTPTestCase, bind_unused_port
+from tornado.web import Application
+
+from webservice.NexusHandler import AlgorithmModuleWrapper
+from webservice.algorithms import StandardDeviationSearch
+from webservice.webapp import ModularNexusHandlerWrapper
+
+
+class HttpParametersTest(AsyncHTTPTestCase):
+ def get_app(self):
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path
+ algorithm = AlgorithmModuleWrapper(StandardDeviationSearch.StandardDeviationSearchHandlerImpl)
+ thread_pool = ThreadPool(processes=1)
+ return Application(
+ [(path, ModularNexusHandlerWrapper, dict(clazz=algorithm, algorithm_config=None, thread_pool=thread_pool))],
+ default_host=bind_unused_port()
+ )
+
+ def test_no_ds_400(self):
+ response = self.fetch(StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("'ds' argument is required", body['error'])
+
+ def test_no_longitude_400(self):
+ params = {
+ "ds": "dataset"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("'longitude' argument is required", body['error'])
+
+ def test_no_latitude_400(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("'latitude' argument is required", body['error'])
+
+ def test_no_day_or_date_400(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4",
+ "latitude": "-84.32"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("At least one of 'day' or 'date' arguments are required but not both.", body['error'])
+
+ def test_no_day_not_int_400(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4",
+ "latitude": "-84.32",
+ "day": "yayday"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("At least one of 'day' or 'date' arguments are required but not both.", body['error'])
+
+ def test_day_and_date_400(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4",
+ "latitude": "-84.32",
+ "day": "35",
+ "date": "1992-01-01T00:00:00Z"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(400, response.code)
+ body = json.loads(response.body)
+ self.assertEqual("At least one of 'day' or 'date' arguments are required but not both.", body['error'])
+
+ def test_no_allInTile_200(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4",
+ "latitude": "-84.32",
+ "day": "35"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(200, response.code)
+
+ def test_allInTile_false_200(self):
+ params = {
+ "ds": "dataset",
+ "longitude": "22.4",
+ "latitude": "-84.32",
+ "date": "1992-01-01T00:00:00Z",
+ "allInTile": "false"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(200, response.code)
+
+ @skip("Integration test only. Works only if you have Solr and Cassandra running locally with data ingested")
+ def test_integration_all_in_tile(self):
+ params = {
+ "ds": "AVHRR_OI_L4_GHRSST_NCEI_CLIM",
+ "longitude": "-177.775",
+ "latitude": "-78.225",
+ "day": "1"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ response = self.fetch(path)
+ self.assertEqual(200, response.code)
+ print response.body
+ body = json.loads(response.body)
+ self.assertEqual(560, len(body['data']))
+
+ @skip("Integration test only. Works only if you have Solr and Cassandra running locally with data ingested")
+ def test_integration_all_in_tile_false(self):
+ params = {
+ "ds": "AVHRR_OI_L4_GHRSST_NCEI_CLIM",
+ "longitude": "-177.875",
+ "latitude": "-78.125",
+ "date": "2016-01-01T00:00:00Z",
+ "allInTile": "false"
+ }
+ path = StandardDeviationSearch.StandardDeviationSearchHandlerImpl.path + '?' + urllib.urlencode(params)
+ # Increase timeouts when debugging
+ # self.http_client.fetch(self.get_url(path), self.stop, connect_timeout=99999999, request_timeout=999999999)
+ # response = self.wait(timeout=9999999999)
+ response = self.fetch(path)
+ self.assertEqual(200, response.code)
+ print response.body
+ body = json.loads(response.body)
+ self.assertAlmostEqual(-177.875, body['data'][0]['longitude'], 3)
+ self.assertAlmostEqual(-78.125, body['data'][0]['latitude'], 3)
+ self.assertAlmostEqual(0.4956, body['data'][0]['standard_deviation'], 4)
+
+
+class TestStandardDeviationSearch(unittest.TestCase):
+ def setUp(self):
+ tile = Tile()
+ tile.bbox = BBox(-1.0, 1.0, -2.0, 2.0)
+ tile.latitudes = np.ma.array([-1.0, -0.5, 0, .5, 1.0])
+ tile.longitudes = np.ma.array([-2.0, -1.0, 0, 1.0, 2.0])
+ tile.times = np.ma.array([0L])
+ tile.data = np.ma.arange(25.0).reshape((1, 5, 5))
+ tile.meta_data = {"std": np.ma.arange(25.0).reshape((1, 5, 5))}
+
+ attrs = {'find_tile_by_polygon_and_most_recent_day_of_year.return_value': [tile]}
+ self.tile_service = Mock(spec=NexusTileService, **attrs)
+
+ def test_get_single_exact_std_dev(self):
+ result = StandardDeviationSearch.get_single_std_dev(self.tile_service, "fake dataset", 1.0, .5, 83)
+ self.assertEqual(1, len(result))
+ self.assertEqual((1.0, 0.5, 18.0), result[0])
+
+ def test_get_single_close_std_dev(self):
+ result = StandardDeviationSearch.get_single_std_dev(self.tile_service, "fake dataset", 1.3, .25, 83)
+ self.assertEqual(1, len(result))
+ self.assertEqual((1.0, 0.0, 13.0), result[0])
+
+ def test_get_all_std_dev(self):
+ result = StandardDeviationSearch.get_all_std_dev(self.tile_service, "fake dataset", 1.3, .25, 83)
+ self.assertEqual(25, len(result))
+
+
+@skip("Integration test only. Works only if you have Solr and Cassandra running locally with data ingested")
+class IntegrationTestStandardDeviationSearch(unittest.TestCase):
+ def setUp(self):
+ self.tile_service = NexusTileService()
+
+ def test_get_single_exact_std_dev(self):
+ result = StandardDeviationSearch.get_single_std_dev(self.tile_service, "AVHRR_OI_L4_GHRSST_NCEI_CLIM", -177.625,
+ -78.375, 1)
+ self.assertEqual(1, len(result))
+ self.assertAlmostEqual(-177.625, result[0][0], 3)
+ self.assertAlmostEqual(-78.375, result[0][1], 3)
+ self.assertAlmostEqual(0.5166, result[0][2], 4)
+
+ def test_get_single_close_std_dev(self):
+ result = StandardDeviationSearch.get_single_std_dev(self.tile_service, "AVHRR_OI_L4_GHRSST_NCEI_CLIM", -177.775,
+ -78.225, 1)
+ self.assertEqual(1, len(result))
+ self.assertAlmostEqual(-177.875, result[0][0], 3)
+ self.assertAlmostEqual(-78.125, result[0][1], 3)
+ self.assertAlmostEqual(0.4956, result[0][2], 4)
+
+ def test_get_all_std_dev(self):
+ result = StandardDeviationSearch.get_all_std_dev(self.tile_service, "AVHRR_OI_L4_GHRSST_NCEI_CLIM", -177.775,
+ -78.225, 1)
+
+ self.assertEqual(560, len(result))
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/tests/algorithms/longitudelatitudemap_test.py
----------------------------------------------------------------------
diff --git a/analysis/tests/algorithms/longitudelatitudemap_test.py b/analysis/tests/algorithms/longitudelatitudemap_test.py
new file mode 100644
index 0000000..0728ab2
--- /dev/null
+++ b/analysis/tests/algorithms/longitudelatitudemap_test.py
@@ -0,0 +1,83 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
+import json
+import time
+import unittest
+import urllib
+from multiprocessing.pool import ThreadPool
+from unittest import skip
+
+from mock import MagicMock
+from nexustiles.nexustiles import NexusTileService
+from shapely.geometry import box
+from tornado.testing import AsyncHTTPTestCase, bind_unused_port
+from tornado.web import Application
+
+from NexusHandler import AlgorithmModuleWrapper
+from webapp import ModularNexusHandlerWrapper
+from webmodel import NexusRequestObject
+from webservice.algorithms import LongitudeLatitudeMap
+
+
+class TestLongitudeLatitudeMap(unittest.TestCase):
+ def setUp(self):
+ self.tile_service = NexusTileService()
+
+ def test_lin_reg(self):
+ LongitudeLatitudeMap.tile_service = self.tile_service
+ print next(
+ LongitudeLatitudeMap.regression_on_tiles((175.01, -42.68, 180.0, -40.2), box(-180, -90, 180, 90).wkt, 1,
+ time.time(), "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1"))
+
+ def test_lat_lon_map_driver_mur(self):
+ # LongitudeLatitudeMap.tile_service = self.tile_service
+ print next(iter(LongitudeLatitudeMap.lat_lon_map_driver(box(-180, -90, 180, 90), 1, time.time(),
+ "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1",
+ [(175.01, -42.68, 180.0, -40.2)])))
+
+ def test_lat_lon_map_driver_ecco(self):
+ bounding = box(-148, 38, -129, 53)
+ ds = "MXLDEPTH_ECCO_version4_release1"
+ start_seconds_from_epoch = 1
+ end_seconds_from_epoch = time.time()
+ boxes = self.tile_service.get_distinct_bounding_boxes_in_polygon(bounding, ds,
+ start_seconds_from_epoch,
+ end_seconds_from_epoch)
+ print LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl.results_to_dicts(
+ LongitudeLatitudeMap.lat_lon_map_driver(bounding, start_seconds_from_epoch, end_seconds_from_epoch, ds,
+ [a_box.bounds for a_box in boxes]))
+
+
+class HttpIntegrationTest(unittest.TestCase):
+ def get_app(self):
+ path = LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl.path
+ algorithm = AlgorithmModuleWrapper(LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl)
+ thread_pool = ThreadPool(processes=1)
+ return Application(
+ [(path, ModularNexusHandlerWrapper, dict(clazz=algorithm, algorithm_config=None, thread_pool=thread_pool))],
+ default_host=bind_unused_port()
+ )
+
+ # @skip("Integration test only. Works only if you have Solr and Cassandra running locally with data ingested")
+ def test_integration_all_in_tile(self):
+ def get_argument(*args, **kwargs):
+ params = {
+ "ds": "MXLDEPTH_ECCO_version4_release1",
+ "minLon": "-45",
+ "minLat": "0",
+ "maxLon": "0",
+ "maxLat": "45",
+ "startTime": "1992-01-01T00:00:00Z",
+ "endTime": "2016-12-01T00:00:00Z"
+ }
+ return params[args[0]]
+ request_handler_mock = MagicMock()
+ request_handler_mock.get_argument.side_effect = get_argument
+ request = NexusRequestObject(request_handler_mock)
+ handler_impl = LongitudeLatitudeMap.LongitudeLatitudeMapHandlerImpl()
+
+ response = handler_impl.calc(request)
+
+ print response.toJson()
http://git-wip-us.apache.org/repos/asf/incubator-sdap-nexus/blob/ff98fa34/analysis/tests/algorithms_spark/Matchup_test.py
----------------------------------------------------------------------
diff --git a/analysis/tests/algorithms_spark/Matchup_test.py b/analysis/tests/algorithms_spark/Matchup_test.py
new file mode 100644
index 0000000..48271a1
--- /dev/null
+++ b/analysis/tests/algorithms_spark/Matchup_test.py
@@ -0,0 +1,311 @@
+"""
+Copyright (c) 2016 Jet Propulsion Laboratory,
+California Institute of Technology. All rights reserved
+"""
+import random
+import timeit
+import unittest
+import pickle
+import json
+
+import numpy as np
+from webservice.algorithms_spark.Matchup import *
+
+
class TestMatch_Points(unittest.TestCase):
    """Unit tests for match_points_generator: pairing primary points with
    matchup points that fall within a distance tolerance (meters)."""

    def test_one_point_match_exact(self):
        # Identical coordinates match even with a zero-meter tolerance.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)
        candidate = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=2)

        pairs = list(match_points_generator([base], [candidate], 0))

        self.assertEqual(1, len(pairs))
        matched_primary, matched_secondary = pairs[0]
        self.assertEqual(base, matched_primary)
        self.assertEqual(candidate, matched_secondary)

    def test_one_point_match_within_tolerance_150km(self):
        # One degree of latitude (~111 km) is inside a 150 km tolerance.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)
        candidate = DomsPoint(longitude=1.0, latitude=3.0, time=1000, depth=5.0, data_id=2)

        pairs = list(match_points_generator([base], [candidate], 150000))  # tolerance 150 km

        self.assertEqual(1, len(pairs))
        matched_primary, matched_secondary = pairs[0]
        self.assertEqual(base, matched_primary)
        self.assertEqual(candidate, matched_secondary)

    def test_one_point_match_within_tolerance_200m(self):
        # 0.001 degrees of longitude near the equator (~111 m) is inside 200 m.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)
        candidate = DomsPoint(longitude=1.001, latitude=2.0, time=1000, depth=5.0, data_id=2)

        pairs = list(match_points_generator([base], [candidate], 200))  # tolerance 200 m

        self.assertEqual(1, len(pairs))
        matched_primary, matched_secondary = pairs[0]
        self.assertEqual(base, matched_primary)
        self.assertEqual(candidate, matched_secondary)

    def test_one_point_not_match_tolerance_150km(self):
        # Two degrees of latitude (~222 km) exceeds a 150 km tolerance.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)
        candidate = DomsPoint(longitude=1.0, latitude=4.0, time=1000, depth=5.0, data_id=2)

        pairs = list(match_points_generator([base], [candidate], 150000))  # tolerance 150 km

        self.assertEqual(0, len(pairs))

    def test_one_point_not_match_tolerance_100m(self):
        # ~111 m of separation exceeds a 100 m tolerance.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)
        candidate = DomsPoint(longitude=1.001, latitude=2.0, time=1000, depth=5.0, data_id=2)

        pairs = list(match_points_generator([base], [candidate], 100))  # tolerance 100 m

        self.assertEqual(0, len(pairs))

    def test_multiple_point_match(self):
        # A single primary can match several candidates; each match is its own pair.
        base = DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1)

        candidates = [
            DomsPoint(longitude=1.0, latitude=3.0, time=1000, depth=10.0, data_id=2),
            DomsPoint(longitude=2.0, latitude=2.0, time=1000, depth=0.0, data_id=3),
            DomsPoint(longitude=0.5, latitude=1.5, time=1000, depth=3.0, data_id=4)
        ]

        pairs = list(match_points_generator([base], candidates, 150000))  # tolerance 150 km

        self.assertEqual(3, len(pairs))
        self.assertSetEqual({base}, {primary for primary, _ in pairs})

        matched = [secondary for _, secondary in pairs]
        self.assertEqual(3, len(matched))
        self.assertItemsEqual(candidates, matched)

    def test_multiple_point_match_multiple_times(self):
        # A candidate may be claimed by more than one primary.
        bases = [
            DomsPoint(longitude=1.0, latitude=2.0, time=1000, depth=5.0, data_id=1),
            DomsPoint(longitude=1.5, latitude=1.5, time=1000, depth=5.0, data_id=2)
        ]

        candidates = [
            DomsPoint(longitude=1.0, latitude=3.0, time=1000, depth=10.0, data_id=3),
            DomsPoint(longitude=2.0, latitude=2.0, time=1000, depth=0.0, data_id=4),
            DomsPoint(longitude=0.5, latitude=1.5, time=1000, depth=3.0, data_id=5)
        ]

        pairs = list(match_points_generator(bases, candidates, 150000))  # tolerance 150 km

        self.assertEqual(5, len(pairs))
        self.assertSetEqual(set(bases), {primary for primary, _ in pairs})

        primaries = [primary for primary, _ in pairs]

        # First primary point matches all 3 candidates.
        self.assertEqual(3, primaries.count(bases[0]))
        self.assertItemsEqual(candidates, [secondary for primary, secondary in pairs if primary == bases[0]])

        # Second primary point matches only the last 2 candidates.
        self.assertEqual(2, primaries.count(bases[1]))
        self.assertItemsEqual(candidates[1:], [secondary for primary, secondary in pairs if primary == bases[1]])

    def test_one_of_many_primary_matches_one_of_many_matchup(self):
        # Real-world coordinates: only the second primary is within 110 km of
        # exactly one candidate.
        bases = [
            DomsPoint(longitude=-33.76764, latitude=30.42946, time=1351553994, data_id=1),
            DomsPoint(longitude=-33.75731, latitude=29.86216, time=1351554004, data_id=2)
        ]

        candidates = [
            DomsPoint(longitude=-33.762, latitude=28.877, time=1351521432, depth=3.973, data_id=3),
            DomsPoint(longitude=-34.916, latitude=28.879, time=1351521770, depth=2.9798, data_id=4),
            DomsPoint(longitude=-31.121, latitude=31.256, time=1351519892, depth=4.07, data_id=5)
        ]

        pairs = list(match_points_generator(bases, candidates, 110000))  # tolerance 110 km

        self.assertEqual(1, len(pairs))
        self.assertSetEqual({b for b in bases if b.data_id == 2}, {primary for primary, _ in pairs})

        primaries = [primary for primary, _ in pairs]

        # First primary point matches none.
        self.assertEqual(0, primaries.count(bases[0]))

        # Second primary point matches only the first candidate.
        self.assertEqual(1, primaries.count(bases[1]))
        self.assertItemsEqual(candidates[0:1], [secondary for primary, secondary in pairs if primary == bases[1]])

    @unittest.skip("This test is just for timing, doesn't actually assert anything.")
    def test_time_many_primary_many_matchup(self):
        import logging
        import sys
        logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                            datefmt="%Y-%m-%dT%H:%M:%S", stream=sys.stdout)
        log = logging.getLogger(__name__)

        # 160000 evenly-spaced primary points in the box [-2.0, 2.0) lat/lon.
        log.info("Generating primary points")
        lons = np.arange(-2.0, 2.0, 0.01)
        lats = np.arange(-2.0, 2.0, 0.01)
        bases = [DomsPoint(longitude=pair[0], latitude=pair[1], time=1000, depth=5.0, data_id=idx)
                 for idx, pair in enumerate(np.array(np.meshgrid(lons, lats)).T.reshape(-1, 2))]

        # 2000 uniformly-random candidates in the same box.
        log.info("Generating matchup points")
        candidates = [DomsPoint(longitude=random.uniform(-2.0, 2.0), latitude=random.uniform(-2.0, 2.0),
                                time=1000, depth=5.0, data_id=idx)
                      for idx in xrange(0, 2000)]

        log.info("Starting matchup")
        log.info("Best of repeat(3, 2) matchups: %s seconds" % min(
            timeit.repeat(lambda: list(match_points_generator(bases, candidates, 1500)), repeat=3,
                          number=2)))
+
+
class TestDOMSPoint(unittest.TestCase):
    def test_is_pickleable(self):
        """A DomsPoint built from an Edge JSON record must survive pickling
        (presumably so Spark can serialize points between workers -- confirm)."""
        raw_record = """{
"id": "argo-profiles-5903995(46, 0)",
"time": "2012-10-15T14:24:04Z",
"point": "-33.467 29.728",
"sea_water_temperature": 24.5629997253,
"sea_water_temperature_depth": 2.9796258642,
"wind_speed": null,
"sea_water_salinity": null,
"sea_water_salinity_depth": null,
"platform": 4,
"device": 3,
"fileurl": "ftp://podaac-ftp.jpl.nasa.gov/allData/insitu/L2/spurs1/argo/argo-profiles-5903995.nc"
}"""
        doms_point = DomsPoint.from_edge_point(json.loads(raw_record))
        self.assertIsNotNone(pickle.dumps(doms_point))
+
+
def check_all():
    """Return True only when every backing service (Solr, Cassandra, Edge) is available.

    all() over a generator preserves the original short-circuit behavior:
    later checks are not called once one reports unavailable.
    """
    return all(check() for check in (check_solr, check_cass, check_edge))
+
+
def check_solr():
    """Report whether a local Solr instance is available.

    TODO eventually this might do something. For now it always reports
    unavailable, keeping the dependent integration tests skipped.
    """
    solr_available = False
    return solr_available
+
+
def check_cass():
    """Report whether a local Cassandra instance is available.

    TODO eventually this might do something. For now it always reports
    unavailable, keeping the dependent integration tests skipped.
    """
    cassandra_available = False
    return cassandra_available
+
+
def check_edge():
    """Report whether a local Edge instance is available.

    TODO eventually this might do something. For now it always reports
    unavailable, keeping the dependent integration tests skipped.
    """
    edge_available = False
    return edge_available
+
+
@unittest.skipUnless(check_all(),
                     "These tests require local instances of Solr, Cassandra, and Edge to be running.")
class TestMatchup(unittest.TestCase):
    """Spark-backed matchup integration smoke tests.

    Each test finds primary-dataset tile ids inside a fixed polygon, runs
    spark_matchup_driver against the "spurs" in-situ dataset, and prints the
    resulting primary->matches pairs. Nothing is asserted. The whole class is
    skipped unless check_all() reports Solr, Cassandra, and Edge available
    (currently it never does, since the check stubs all return False).

    NOTE(review): `wkt` and `spark_matchup_driver` are not imported in this
    file's visible import block -- presumably they arrive via the star import
    of webservice.algorithms_spark.Matchup; confirm.
    """

    def setUp(self):
        # NOTE(review): hardcoded developer-specific interpreter and Spark
        # paths -- these only work on the original author's machine; consider
        # reading them from the environment instead.
        from os import environ
        environ['PYSPARK_DRIVER_PYTHON'] = '/Users/greguska/anaconda/envs/nexus-analysis/bin/python2.7'
        environ['PYSPARK_PYTHON'] = '/Users/greguska/anaconda/envs/nexus-analysis/bin/python2.7'
        environ['SPARK_HOME'] = '/Users/greguska/sandbox/spark-2.0.0-bin-hadoop2.7'

    def test_mur_match(self):
        # Match MUR SST tiles against spurs in-situ data for one day.
        from shapely.wkt import loads
        from nexustiles.nexustiles import NexusTileService

        polygon = loads("POLYGON((-34.98 29.54, -30.1 29.54, -30.1 31.00, -34.98 31.00, -34.98 29.54))")
        primary_ds = "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1"
        matchup_ds = "spurs"
        parameter = "sst"
        start_time = 1350259200  # 2012-10-15T00:00:00Z
        end_time = 1350345600  # 2012-10-16T00:00:00Z
        time_tolerance = 86400  # seconds
        depth_tolerance = 5.0  # presumably meters -- confirm units in Matchup
        radius_tolerance = 1500.0  # presumably meters -- confirm units in Matchup
        platforms = "1,2,3,4,5,6,7,8,9"

        # Only tile ids are needed here; skip fetching the tile data itself.
        tile_service = NexusTileService()
        tile_ids = [tile.tile_id for tile in
                    tile_service.find_tiles_in_polygon(polygon, primary_ds, start_time, end_time, fetch_data=False,
                                                       fl='id')]
        result = spark_matchup_driver(tile_ids, wkt.dumps(polygon), primary_ds, matchup_ds, parameter, time_tolerance,
                                      depth_tolerance, radius_tolerance, platforms)
        # Dump each primary point and its matched in-situ points for manual inspection.
        for k, v in result.iteritems():
            print "primary: %s\n\tmatches:\n\t\t%s" % (
                "lon: %s, lat: %s, time: %s, sst: %s" % (k.longitude, k.latitude, k.time, k.sst),
                '\n\t\t'.join(
                    ["lon: %s, lat: %s, time: %s, sst: %s" % (i.longitude, i.latitude, i.time, i.sst) for i in v]))

    def test_smap_match(self):
        # Match SMAP sea-surface-salinity tiles against spurs in-situ data.
        # NOTE(review): the print below reports k.sst/i.sst even though the
        # matchup parameter is "sss" -- looks like a copy/paste from
        # test_mur_match; confirm which attribute DomsPoint carries here.
        from shapely.wkt import loads
        from nexustiles.nexustiles import NexusTileService

        polygon = loads("POLYGON((-34.98 29.54, -30.1 29.54, -30.1 31.00, -34.98 31.00, -34.98 29.54))")
        primary_ds = "SMAP_L2B_SSS"
        matchup_ds = "spurs"
        parameter = "sss"
        start_time = 1350259200  # 2012-10-15T00:00:00Z
        end_time = 1350345600  # 2012-10-16T00:00:00Z
        time_tolerance = 86400  # seconds
        depth_tolerance = 5.0  # presumably meters -- confirm units in Matchup
        radius_tolerance = 1500.0  # presumably meters -- confirm units in Matchup
        platforms = "1,2,3,4,5,6,7,8,9"

        tile_service = NexusTileService()
        tile_ids = [tile.tile_id for tile in
                    tile_service.find_tiles_in_polygon(polygon, primary_ds, start_time, end_time, fetch_data=False,
                                                       fl='id')]
        result = spark_matchup_driver(tile_ids, wkt.dumps(polygon), primary_ds, matchup_ds, parameter, time_tolerance,
                                      depth_tolerance, radius_tolerance, platforms)
        for k, v in result.iteritems():
            print "primary: %s\n\tmatches:\n\t\t%s" % (
                "lon: %s, lat: %s, time: %s, sst: %s" % (k.longitude, k.latitude, k.time, k.sst),
                '\n\t\t'.join(
                    ["lon: %s, lat: %s, time: %s, sst: %s" % (i.longitude, i.latitude, i.time, i.sst) for i in v]))

    def test_ascatb_match(self):
        # Match ASCAT-B coastal wind tiles against spurs in-situ data; prints
        # the u/v wind components of each match.
        from shapely.wkt import loads
        from nexustiles.nexustiles import NexusTileService

        polygon = loads("POLYGON((-34.98 29.54, -30.1 29.54, -30.1 31.00, -34.98 31.00, -34.98 29.54))")
        primary_ds = "ASCATB-L2-Coastal"
        matchup_ds = "spurs"
        parameter = "wind"
        start_time = 1351468800  # 2012-10-29T00:00:00Z
        end_time = 1351555200  # 2012-10-30T00:00:00Z
        time_tolerance = 86400  # seconds
        depth_tolerance = 5.0  # presumably meters -- confirm units in Matchup
        radius_tolerance = 110000.0  # 110 km
        platforms = "1,2,3,4,5,6,7,8,9"

        tile_service = NexusTileService()
        tile_ids = [tile.tile_id for tile in
                    tile_service.find_tiles_in_polygon(polygon, primary_ds, start_time, end_time, fetch_data=False,
                                                       fl='id')]
        result = spark_matchup_driver(tile_ids, wkt.dumps(polygon), primary_ds, matchup_ds, parameter, time_tolerance,
                                      depth_tolerance, radius_tolerance, platforms)
        for k, v in result.iteritems():
            print "primary: %s\n\tmatches:\n\t\t%s" % (
                "lon: %s, lat: %s, time: %s, wind u,v: %s,%s" % (k.longitude, k.latitude, k.time, k.wind_u, k.wind_v),
                '\n\t\t'.join(
                    ["lon: %s, lat: %s, time: %s, wind u,v: %s,%s" % (
                        i.longitude, i.latitude, i.time, i.wind_u, i.wind_v) for i in v]))