You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@rya.apache.org by dl...@apache.org on 2017/08/30 20:31:45 UTC

[07/14] incubator-rya git commit: RYA-324, RYA-272 Geo refactoring and examples closes #182

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoGeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoGeoIndexerTest.java b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoGeoIndexerTest.java
new file mode 100644
index 0000000..93cabc4
--- /dev/null
+++ b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoGeoIndexerTest.java
@@ -0,0 +1,370 @@
+package org.apache.rya.indexing.mongo;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+
+import static org.apache.rya.api.resolver.RdfToRyaConversions.convertStatement;
+import static org.apache.rya.indexing.GeoIndexingTestUtils.getSet;
+
+import java.util.Collections;
+import java.util.Set;
+
+import org.apache.rya.indexing.GeoConstants;
+import org.apache.rya.indexing.StatementConstraints;
+import org.apache.rya.indexing.accumulo.ConfigUtils;
+import org.apache.rya.indexing.geotemporal.mongo.MongoITBase;
+import org.apache.rya.indexing.accumulo.geo.OptionalConfigUtils;
+import org.apache.rya.indexing.mongodb.geo.MongoGeoIndexer;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ContextStatementImpl;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+
+import com.google.common.collect.Sets;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.GeometryFactory;
+import com.vividsolutions.jts.geom.LinearRing;
+import com.vividsolutions.jts.geom.Point;
+import com.vividsolutions.jts.geom.Polygon;
+import com.vividsolutions.jts.geom.PrecisionModel;
+import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;
+
+public class MongoGeoIndexerTest extends MongoITBase {
+    private static final StatementConstraints EMPTY_CONSTRAINTS = new StatementConstraints();
+    GeometryFactory gf = new GeometryFactory(new PrecisionModel(), 4326);
+
+    @Before
+    public void before() throws Exception {
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
+        conf.set(OptionalConfigUtils.USE_GEO, "true");
+    }
+
+    @Test
+    public void testRestrictPredicatesSearch() throws Exception {
+        conf.setStrings(ConfigUtils.GEO_PREDICATES_LIST, "pred:1,pred:2");
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+
+            final Point point = gf.createPoint(new Coordinate(10, 10));
+            final Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final URI invalidPredicate = GeoConstants.GEO_AS_WKT;
+
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
+
+            final URI pred1 = vf.createURI("pred:1");
+            final URI pred2 = vf.createURI("pred:2");
+
+            // These should be stored because they are in the predicate list
+            final Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
+            final Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
+            f.storeStatement(convertStatement(s3));
+            f.storeStatement(convertStatement(s4));
+
+            // This should not be stored because the object is not valid wkt
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
+
+            // This should not be stored because the object is not a literal
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
+
+            f.flush();
+
+            final Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(s3));
+            Assert.assertTrue(actual.contains(s4));
+        }
+    }
+
+    @Test
+    public void testPrimeMeridianSearch() throws Exception {
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
+            final double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
+            final double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
+
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            final LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
+            final LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
+
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            final Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
+            final Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
+
+            // Test a ring with a hole in it
+            final Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            final double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
+            final LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            final Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDcSearch() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            final double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            final LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            final Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDeleteSearch() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            f.deleteStatement(convertStatement(statement));
+
+            // test a ring that the point would be inside of if not deleted
+            final double[] in = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(in, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring that the point would be outside of if not deleted
+            final double[] out = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            final LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(out, 2));
+            final Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+
+            // test a ring for the whole world and make sure the point is gone
+            // Geomesa is a little sensitive around lon 180, so we only go to 179
+            final double[] world = { -180, 90, 179, 90, 179, -90, -180, -90, -180, 90 };
+            final LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
+            final Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithContext() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct context
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementConstraints().setContext(context))));
+
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementConstraints().setContext(vf.createURI("foo:context2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithSubject() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct subject
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementConstraints().setSubject(subject))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementConstraints().setSubject(vf.createURI("foo:subj2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithSubjectAndContext() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct context subject
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementConstraints().setContext(context).setSubject(subject))));
+
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementConstraints().setContext(vf.createURI("foo:context2")))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementConstraints().setSubject(vf.createURI("foo:subj2")))));
+        }
+    }
+
+    @Test
+    public void testDcSearchWithPredicate() throws Exception {
+        // test a ring around dc
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource subject = vf.createURI("foo:subj");
+            final URI predicate = GeoConstants.GEO_AS_WKT;
+            final Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Resource context = vf.createURI("foo:context");
+
+            final Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            final double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct Predicate
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementConstraints().setPredicates(Collections.singleton(predicate)))));
+
+            // query with wrong predicate
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementConstraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
+        }
+    }
+
+    // @Test
+    public void testAntiMeridianSearch() throws Exception {
+        // verify that a search works if the bounding box crosses the anti meridian
+        try (final MongoGeoIndexer f = new MongoGeoIndexer()) {
+            f.initIndexer(conf, super.getMongoClient());
+
+            final ValueFactory vf = new ValueFactoryImpl();
+            final Resource context = vf.createURI("foo:context");
+
+            final Resource subjectEast = vf.createURI("foo:subj:east");
+            final URI predicateEast = GeoConstants.GEO_AS_WKT;
+            final Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
+            f.storeStatement(convertStatement(statementEast));
+
+            final Resource subjectWest = vf.createURI("foo:subj:west");
+            final URI predicateWest = GeoConstants.GEO_AS_WKT;
+            final Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            final Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
+            f.storeStatement(convertStatement(statementWest));
+
+            f.flush();
+
+            final double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
+
+            final LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+
+            final Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/pom.xml b/extras/rya.geoindexing/pom.xml
index a2127aa..9ba4ed0 100644
--- a/extras/rya.geoindexing/pom.xml
+++ b/extras/rya.geoindexing/pom.xml
@@ -11,27 +11,44 @@
     OF ANY KIND, either express or implied. See the License for the specific 
     language governing permissions and limitations under the License. -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.11-incubating-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>rya.geoindexing</artifactId>
-    <name>Apache Rya Geospatial Secondary Indexing (Optional)</name>
-
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.rya</groupId>
+    <artifactId>rya.extras</artifactId>
+    <version>3.2.11-incubating-SNAPSHOT</version>
+  </parent>
+  <artifactId>rya.geoindexing</artifactId>
+  <packaging>pom</packaging>
+  <name>Apache Rya Geo Indexing Projects</name>
+  <description>This parent has several alternative implementations that use different
+  libraries, or different versions of the same library. Specifically:
+          GeoMesa depends on GeoTools v5.1, and
+          GeoWave depends on GeoTools v6.
+  See the module poms for the actual versions used.
+  </description>
+     <modules>
+        <!-- common for all implementations -->
+        <module>geo.common</module>
+        <!-- GeoMesa for Accumulo; uses GeoTools (not compatible with GeoWave's dependencies) -->
+        <module>geo.geomesa</module>
+        <!-- GeoWave for Accumulo; uses GeoTools (not compatible with GeoMesa's dependencies) -->
+        <module>geo.geowave</module>
+        <!-- Mongo native geo, not Accumulo -->
+        <module>geo.mongo</module>
+     </modules>
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <geotools.version>14.3</geotools.version>
     </properties>
 
-    <dependencies>
-
-        <dependency>
-            <groupId>org.apache.accumulo</groupId>
+	<dependencies>
+		<dependency>
+			<groupId>com.vividsolutions</groupId>
+			<artifactId>jts</artifactId>
+			<version>1.13</version>
+		</dependency>
+		<dependency>
+		   <groupId>org.apache.accumulo</groupId>
             <artifactId>accumulo-minicluster</artifactId>
             <scope>test</scope>
         </dependency>
@@ -78,34 +95,16 @@
             <artifactId>commons-codec</artifactId>
         </dependency>
 
-        <!-- Geo Indexing -->
-        <dependency>
-            <groupId>org.locationtech.geomesa</groupId>
-            <artifactId>geomesa-accumulo-datastore_2.11</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>mil.nga.giat</groupId>
-            <artifactId>geowave-datastore-accumulo</artifactId>
-            <version>${geowave.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>mil.nga.giat</groupId>
-            <artifactId>geowave-adapter-vector</artifactId>
-            <version>${geowave.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <scope>test</scope>
-        </dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.mockito</groupId>
+			<artifactId>mockito-all</artifactId>
+			<scope>test</scope>
+		</dependency>
         <dependency>
             <groupId>org.apache.rya</groupId>
             <artifactId>accumulo.rya</artifactId>
@@ -118,104 +117,90 @@
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.geotools.xsd</groupId>
-            <artifactId>gt-xsd-gml3</artifactId>
-            <version>${geotools.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.geotools</groupId>
-            <artifactId>gt-api</artifactId>
-            <version>${geotools.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.vividsolutions</groupId>
-            <artifactId>jts</artifactId>
-            <version>1.13</version>
-        </dependency>
     </dependencies>
-    <build>
-        <pluginManagement>
-            <plugins>
-                <plugin>
-                    <groupId>org.apache.rat</groupId>
-                    <artifactId>apache-rat-plugin</artifactId>
-                    <configuration>
-                        <excludes>
-                            <!-- RDF data Files -->
-                            <exclude>**/*.ttl</exclude>
+	<build>
+		<pluginManagement>
+			<plugins>
+				<plugin>
+					<groupId>org.apache.rat</groupId>
+					<artifactId>apache-rat-plugin</artifactId>
+					<configuration>
+						<excludes>
+							<!-- RDF data Files -->
+							<exclude>**/*.ttl</exclude>
+
+							<!-- Services Files -->
+							<exclude>**/resources/META-INF/services/**</exclude>
+						</excludes>
+					</configuration>
+				</plugin>
+			</plugins>
+		</pluginManagement>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-shade-plugin</artifactId>
+				<executions>
+					<execution>
+						<goals>
+							<goal>shade</goal>
+						</goals>
+						<configuration>
+							<shadedArtifactAttached>true</shadedArtifactAttached>
+							<shadedClassifierName>map-reduce</shadedClassifierName>
+							<transformers>
+								<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+							</transformers>
+							<filters>
+								<filter>
+									<artifact>*:*</artifact>
+									<excludes>
+										<exclude>META-INF/*.SF</exclude>
+										<exclude>META-INF/*.DSA</exclude>
+										<exclude>META-INF/*.RSA</exclude>
+									</excludes>
+								</filter>
+							</filters>
+						</configuration>
+					</execution>
+					<execution>
+						<id>accumulo-server</id>
+						<phase>package</phase>
+						<goals>
+							<goal>shade</goal>
+						</goals>
+						<configuration>
+							<shadedArtifactAttached>true</shadedArtifactAttached>
+							<shadedClassifierName>accumulo-server</shadedClassifierName>
+							<artifactSet>
+								<excludes>
+									<exclude>org.locationtech.geomesa:*</exclude>
+									<exclude>mil.nga.giat:*</exclude>
+									<exclude>scala:*</exclude>
+									<exclude>org.apache.accumulo:*</exclude>
+									<exclude>org.apache.thrift:*</exclude>
+									<exclude>org.apache.hadoop:*</exclude>
+									<exclude>org.apache.zookeeper:*</exclude>
+								</excludes>
+							</artifactSet>
+							<transformers>
+								<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+							</transformers>
+							<filters>
+								<filter>
+									<artifact>*:*</artifact>
+									<excludes>
+										<exclude>META-INF/*.SF</exclude>
+										<exclude>META-INF/*.DSA</exclude>
+										<exclude>META-INF/*.RSA</exclude>
+									</excludes>
+								</filter>
+							</filters>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
 
-                            <!-- Services Files -->
-                            <exclude>**/resources/META-INF/services/**</exclude>
-                        </excludes>
-                    </configuration>
-                </plugin>
-            </plugins>
-        </pluginManagement>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <shadedArtifactAttached>true</shadedArtifactAttached>
-                            <shadedClassifierName>map-reduce</shadedClassifierName>
-                            <transformers>
-                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                            </transformers>
-                            <filters>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                        </configuration>
-                    </execution>
-                    <execution>
-                        <id>accumulo-server</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <shadedArtifactAttached>true</shadedArtifactAttached>
-                            <shadedClassifierName>accumulo-server</shadedClassifierName>
-                            <artifactSet>
-                                <excludes>
-                                    <exclude>org.locationtech.geomesa:*</exclude>
-                                    <exclude>mil.nga.giat:*</exclude>
-                                    <exclude>scala:*</exclude>
-                                    <exclude>org.apache.accumulo:*</exclude>
-                                    <exclude>org.apache.thrift:*</exclude>
-                                    <exclude>org.apache.hadoop:*</exclude>
-                                    <exclude>org.apache.zookeeper:*</exclude>
-                                </excludes>
-                            </artifactSet>
-                            <transformers>
-                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                            </transformers>
-                            <filters>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoEnabledFilterFunctionOptimizer.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoEnabledFilterFunctionOptimizer.java b/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoEnabledFilterFunctionOptimizer.java
deleted file mode 100644
index b7c49d8..0000000
--- a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoEnabledFilterFunctionOptimizer.java
+++ /dev/null
@@ -1,332 +0,0 @@
-package org.apache.rya.indexing;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.io.IOException;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.indexing.IndexingFunctionRegistry.FUNCTION_TYPE;
-import org.apache.rya.indexing.accumulo.ConfigUtils;
-import org.apache.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer;
-import org.apache.rya.indexing.accumulo.freetext.FreeTextTupleSet;
-import org.apache.rya.indexing.accumulo.geo.GeoMesaGeoIndexer;
-import org.apache.rya.indexing.accumulo.geo.GeoParseUtils;
-import org.apache.rya.indexing.accumulo.geo.GeoTupleSet;
-import org.apache.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer;
-import org.apache.rya.indexing.mongodb.freetext.MongoFreeTextIndexer;
-import org.apache.rya.indexing.mongodb.geo.MongoGeoIndexer;
-import org.apache.rya.indexing.mongodb.temporal.MongoTemporalIndexer;
-import org.geotools.feature.SchemaException;
-import org.openrdf.model.Resource;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.Dataset;
-import org.openrdf.query.algebra.And;
-import org.openrdf.query.algebra.Filter;
-import org.openrdf.query.algebra.FunctionCall;
-import org.openrdf.query.algebra.Join;
-import org.openrdf.query.algebra.LeftJoin;
-import org.openrdf.query.algebra.QueryModelNode;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.ValueConstant;
-import org.openrdf.query.algebra.Var;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-
-import com.google.common.collect.Lists;
-
-public class GeoEnabledFilterFunctionOptimizer implements QueryOptimizer, Configurable {
-    private static final Logger LOG = Logger.getLogger(GeoEnabledFilterFunctionOptimizer.class);
-    private final ValueFactory valueFactory = new ValueFactoryImpl();
-
-    private Configuration conf;
-    private GeoIndexer geoIndexer;
-    private FreeTextIndexer freeTextIndexer;
-    private TemporalIndexer temporalIndexer;
-    private boolean init = false;
-
-    public GeoEnabledFilterFunctionOptimizer() {
-    }
-
-    public GeoEnabledFilterFunctionOptimizer(final AccumuloRdfConfiguration conf) throws AccumuloException, AccumuloSecurityException,
-    TableNotFoundException, IOException, SchemaException, TableExistsException, NumberFormatException, UnknownHostException {
-        this.conf = conf;
-        init();
-    }
-
-    //setConf initializes FilterFunctionOptimizer so reflection can be used
-    //to create optimizer in RdfCloudTripleStoreConnection
-    @Override
-    public void setConf(final Configuration conf) {
-        this.conf = conf;
-        //reset the init.
-        init = false;
-            init();
-    }
-
-    private synchronized void init() {
-        if (!init) {
-            if (ConfigUtils.getUseMongo(conf)) {
-                    geoIndexer = new MongoGeoIndexer();
-                    geoIndexer.setConf(conf);
-                    freeTextIndexer = new MongoFreeTextIndexer();
-                    freeTextIndexer.setConf(conf);
-                    temporalIndexer = new MongoTemporalIndexer();
-                    temporalIndexer.setConf(conf);
-            } else {
-                geoIndexer = new GeoMesaGeoIndexer();
-                geoIndexer.setConf(conf);
-                freeTextIndexer = new AccumuloFreeTextIndexer();
-                freeTextIndexer.setConf(conf);
-                temporalIndexer = new AccumuloTemporalIndexer();
-                temporalIndexer.setConf(conf);
-            }
-            init = true;
-        }
-    }
-
-    @Override
-    public void optimize(final TupleExpr tupleExpr, final Dataset dataset, final BindingSet bindings) {
-     // find variables used in property and resource based searches:
-        final SearchVarVisitor searchVars = new SearchVarVisitor();
-        tupleExpr.visit(searchVars);
-        // rewrites for property searches:
-        processPropertySearches(tupleExpr, searchVars.searchProperties);
-
-    }
-
-
-
-    private void processPropertySearches(final TupleExpr tupleExpr, final Collection<Var> searchProperties) {
-        final MatchStatementVisitor matchStatements = new MatchStatementVisitor(searchProperties);
-        tupleExpr.visit(matchStatements);
-        for (final StatementPattern matchStatement: matchStatements.matchStatements) {
-            final Var subject = matchStatement.getSubjectVar();
-            if (subject.hasValue() && !(subject.getValue() instanceof Resource)) {
-                throw new IllegalArgumentException("Query error: Found " + subject.getValue() + ", expected an URI or BNode");
-            }
-            Validate.isTrue(subject.hasValue() || subject.getName() != null);
-            Validate.isTrue(!matchStatement.getObjectVar().hasValue() && matchStatement.getObjectVar().getName() != null);
-            buildQuery(tupleExpr, matchStatement);
-        }
-    }
-
-    private void buildQuery(final TupleExpr tupleExpr, final StatementPattern matchStatement) {
-        //If our IndexerExpr (to be) is the rhs-child of LeftJoin, we can safely make that a Join:
-        //  the IndexerExpr will (currently) not return results that can deliver unbound variables.
-        //This optimization should probably be generalized into a LeftJoin -> Join optimizer under certain conditions. Until that
-        //  has been done, this code path at least takes care of queries generated by OpenSahara SparqTool that filter on OPTIONAL
-        //  projections. E.g. summary~'full text search' (summary is optional). See #379
-        if (matchStatement.getParentNode() instanceof LeftJoin) {
-            final LeftJoin leftJoin = (LeftJoin)matchStatement.getParentNode();
-            if (leftJoin.getRightArg() == matchStatement && leftJoin.getCondition() == null) {
-                matchStatement.getParentNode().replaceWith(new Join(leftJoin.getLeftArg(), leftJoin.getRightArg()));
-            }
-        }
-        final FilterFunction fVisitor = new FilterFunction(matchStatement.getObjectVar().getName());
-        tupleExpr.visit(fVisitor);
-        final List<IndexingExpr> results = Lists.newArrayList();
-        for(int i = 0; i < fVisitor.func.size(); i++){
-            results.add(new IndexingExpr(fVisitor.func.get(i), matchStatement, fVisitor.args.get(i)));
-        }
-        removeMatchedPattern(tupleExpr, matchStatement, new IndexerExprReplacer(results));
-    }
-
-    //find vars contained in filters
-    private static class SearchVarVisitor extends QueryModelVisitorBase<RuntimeException> {
-        private final Collection<Var> searchProperties = new ArrayList<Var>();
-
-        @Override
-        public void meet(final FunctionCall fn) {
-            final URI fun = new URIImpl(fn.getURI());
-            final Var result = IndexingFunctionRegistry.getResultVarFromFunctionCall(fun, fn.getArgs());
-            if (result != null && !searchProperties.contains(result)) {
-                searchProperties.add(result);
-            }
-        }
-    }
-
-    //find StatementPatterns containing filter variables
-    private static class MatchStatementVisitor extends QueryModelVisitorBase<RuntimeException> {
-        private final Collection<Var> propertyVars;
-        private final Collection<Var> usedVars = new ArrayList<Var>();
-        private final List<StatementPattern> matchStatements = new ArrayList<StatementPattern>();
-
-        public MatchStatementVisitor(final Collection<Var> propertyVars) {
-            this.propertyVars = propertyVars;
-        }
-
-        @Override public void meet(final StatementPattern statement) {
-            final Var object = statement.getObjectVar();
-            if (propertyVars.contains(object)) {
-                if (usedVars.contains(object)) {
-                    throw new IllegalArgumentException("Illegal search, variable is used multiple times as object: " + object.getName());
-                } else {
-                    usedVars.add(object);
-                    matchStatements.add(statement);
-                }
-            }
-        }
-    }
-
-    private abstract class AbstractEnhanceVisitor extends QueryModelVisitorBase<RuntimeException> {
-        final String matchVar;
-        List<URI> func = Lists.newArrayList();
-        List<Object[]> args = Lists.newArrayList();
-
-        public AbstractEnhanceVisitor(final String matchVar) {
-            this.matchVar = matchVar;
-        }
-
-        protected void addFilter(final URI uri, final Object[] values) {
-            func.add(uri);
-            args.add(values);
-        }
-    }
-
-    //create indexing expression for each filter matching var in filter StatementPattern
-    //replace old filter condition with true condition
-    private class FilterFunction extends AbstractEnhanceVisitor {
-        public FilterFunction(final String matchVar) {
-            super(matchVar);
-        }
-
-        @Override
-        public void meet(final FunctionCall call) {
-            final URI fnUri = valueFactory.createURI(call.getURI());
-            final Var resultVar = IndexingFunctionRegistry.getResultVarFromFunctionCall(fnUri, call.getArgs());
-            if (resultVar != null && resultVar.getName().equals(matchVar)) {
-                addFilter(valueFactory.createURI(call.getURI()), GeoParseUtils.extractArguments(matchVar, call));
-                if (call.getParentNode() instanceof Filter || call.getParentNode() instanceof And || call.getParentNode() instanceof LeftJoin) {
-                    call.replaceWith(new ValueConstant(valueFactory.createLiteral(true)));
-                } else {
-                    throw new IllegalArgumentException("Query error: Found " + call + " as part of an expression that is too complex");
-                }
-            }
-        }
-
-        @Override
-        public void meet(final Filter filter) {
-            //First visit children, then condition (reverse of default):
-            filter.getArg().visit(this);
-            filter.getCondition().visit(this);
-        }
-    }
-
-    private void removeMatchedPattern(final TupleExpr tupleExpr, final StatementPattern pattern, final TupleExprReplacer replacer) {
-        final List<TupleExpr> indexTuples = replacer.createReplacement(pattern);
-        if (indexTuples.size() > 1) {
-            final VarExchangeVisitor vev = new VarExchangeVisitor(pattern);
-            tupleExpr.visit(vev);
-            Join join = new Join(indexTuples.remove(0), indexTuples.remove(0));
-            for (final TupleExpr geo : indexTuples) {
-                join = new Join(join, geo);
-            }
-            pattern.replaceWith(join);
-        } else if (indexTuples.size() == 1) {
-            pattern.replaceWith(indexTuples.get(0));
-            pattern.setParentNode(null);
-        } else {
-            throw new IllegalStateException("Must have at least one replacement for matched StatementPattern.");
-        }
-    }
-
-    private interface TupleExprReplacer {
-        List<TupleExpr> createReplacement(TupleExpr org);
-    }
-
-    //replace each filter pertinent StatementPattern with corresponding index expr
-    private class IndexerExprReplacer implements TupleExprReplacer {
-        private final List<IndexingExpr> indxExpr;
-        private final FUNCTION_TYPE type;
-
-        public IndexerExprReplacer(final List<IndexingExpr> indxExpr) {
-            this.indxExpr = indxExpr;
-            final URI func = indxExpr.get(0).getFunction();
-            type = IndexingFunctionRegistry.getFunctionType(func);
-        }
-
-        @Override
-        public List<TupleExpr> createReplacement(final TupleExpr org) {
-            final List<TupleExpr> indexTuples = Lists.newArrayList();
-            switch (type) {
-            case GEO:
-                for (final IndexingExpr indx : indxExpr) {
-                    indexTuples.add(new GeoTupleSet(indx, geoIndexer));
-                }
-                break;
-            case FREETEXT:
-                for (final IndexingExpr indx : indxExpr) {
-                    indexTuples.add(new FreeTextTupleSet(indx, freeTextIndexer));
-                }
-                break;
-            case TEMPORAL:
-                for (final IndexingExpr indx : indxExpr) {
-                    indexTuples.add(new TemporalTupleSet(indx, temporalIndexer));
-                }
-                break;
-            default:
-                throw new IllegalArgumentException("Incorrect type!");
-            }
-            return indexTuples;
-        }
-    }
-
-    private static class VarExchangeVisitor extends QueryModelVisitorBase<RuntimeException> {
-        private final  StatementPattern exchangeVar;
-        public VarExchangeVisitor(final StatementPattern sp) {
-            exchangeVar = sp;
-        }
-
-        @Override
-        public void meet(final Join node) {
-            final QueryModelNode lNode = node.getLeftArg();
-            if (lNode instanceof StatementPattern) {
-                exchangeVar.replaceWith(lNode);
-                node.setLeftArg(exchangeVar);
-            } else {
-                super.meet(node);
-            }
-        }
-    }
-
-    @Override
-    public Configuration getConf() {
-        return conf;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexer.java b/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexer.java
deleted file mode 100644
index d091d32..0000000
--- a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexer.java
+++ /dev/null
@@ -1,210 +0,0 @@
-package org.apache.rya.indexing;
-
-import org.openrdf.model.Statement;
-import org.openrdf.query.QueryEvaluationException;
-
-import com.vividsolutions.jts.geom.Geometry;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import info.aduna.iteration.CloseableIteration;
-import org.apache.rya.api.persist.index.RyaSecondaryIndexer;
-import org.apache.rya.indexing.accumulo.geo.GeoTupleSet.GeoSearchFunctionFactory.NearQuery;
-
-/**
- * A repository to store, index, and retrieve {@link Statement}s based on geospatial features.
- */
-public interface GeoIndexer extends RyaSecondaryIndexer {
-	/**
-	 * Returns statements that contain a geometry that is equal to the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>
-	 * "Two geometries are topologically equal if their interiors intersect and no part of the interior or boundary of one geometry intersects the exterior of the other"
-	 * <li>"A is equal to B if A is within B and A contains B"
-	 * </ul>
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryEquals(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that is disjoint to the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>"A and B are disjoint if they have no point in common. They form a set of disconnected geometries."
-	 * <li>"A and B are disjoint if A does not intersect B"
-	 * </ul>
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryDisjoint(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that Intersects the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>"a intersects b: geometries a and b have at least one point in common."
-	 * <li>"not Disjoint"
-	 * </ul>
-	 *
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryIntersects(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that Touches the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>"a touches b, they have at least one boundary point in common, but no interior points."
-	 * </ul>
-	 *
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryTouches(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that crosses the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>
-	 * "a crosses b, they have some but not all interior points in common (and the dimension of the intersection is less than that of at least one of them)."
-	 * </ul>
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryCrosses(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that is Within the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>"a is within b, a lies in the interior of b"
-	 * <li>Same as: "Contains(b,a)"
-	 * </ul>
-	 *
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryWithin(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that Contains the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>b is within a. Geometry b lies in the interior of a. Another definition:
-	 * "a 'contains' b iff no points of b lie in the exterior of a, and at least one point of the interior of b lies in the interior of a"
-	 * <li>Same: Within(b,a)
-	 * </ul>
-	 *
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryContains(Geometry query, StatementConstraints contraints);
-
-	/**
-	 * Returns statements that contain a geometry that Overlaps the queried {@link Geometry} and meet the {@link StatementConstraints}.
-	 *
-	 * <p>
-	 * From Wikipedia (http://en.wikipedia.org/wiki/DE-9IM):
-	 * <ul>
-	 * <li>a crosses b, they have some but not all interior points in common (and the dimension of the intersection is less than that of at
-	 * least one of them).
-	 * </ul>
-	 *
-	 *
-	 * @param query
-	 *            the queried geometry
-	 * @param contraints
-	 *            the {@link StatementConstraints}
-	 * @return
-	 */
-	public abstract CloseableIteration<Statement, QueryEvaluationException> queryOverlaps(Geometry query, StatementConstraints contraints);
-	
-    /**
-     * Returns statements that contain a geometry that is near the queried {@link Geometry} and meet the {@link StatementConstraints}.
-     * <p>
-     * A geometry is considered near if it within the min/max distances specified in the provided {@link NearQuery}.  This will make a disc (specify max),
-     *  a donut(specify both), or a spheroid complement disc (specify min)
-     * <p>
-     * The distances are specified in meters and must be >= 0.
-     * <p>
-     * To specify max/min distances:
-     * <ul>
-     * <li>Enter parameters in order MAX, MIN -- Donut</li>
-     * <li>Omit the MIN -- Disc</li>
-     * <li>Enter 0 for MAX, and Enter parameter for MIN -- Spheroid complement Dist</li>
-     * <li>Omit both -- Default max/min [TODO: Find these values]</li>
-     * </ul>
-     * <p>
-     * Note: This query will not fail if the min is greater than the max, it will just return no results.
-     * 
-     * @param query the queried geometry, with Optional min and max distance fields.
-     * @param contraints the {@link StatementConstraints}
-     * @return
-     */
-    public abstract CloseableIteration<Statement, QueryEvaluationException> queryNear(NearQuery query, StatementConstraints contraints);
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexerType.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexerType.java b/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexerType.java
deleted file mode 100644
index 1af51b0..0000000
--- a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoIndexerType.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.indexing;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-import org.apache.rya.indexing.accumulo.geo.GeoMesaGeoIndexer;
-import org.apache.rya.indexing.accumulo.geo.GeoWaveGeoIndexer;
-import org.apache.rya.indexing.mongodb.geo.MongoGeoIndexer;
-
-/**
- * A list of all the types of Geo indexers supported in Rya.
- */
-public enum GeoIndexerType {
-    /**
-     * Geo Mesa based indexer.
-     */
-    GEO_MESA(GeoMesaGeoIndexer.class),
-    /**
-     * Geo Wave based indexer.
-     */
-    GEO_WAVE(GeoWaveGeoIndexer.class),
-    /**
-     * MongoDB based indexer.
-     */
-    MONGO_DB(MongoGeoIndexer.class);
-
-    private Class<? extends GeoIndexer> geoIndexerClass;
-
-    /**
-     * Creates a new {@link GeoIndexerType}.
-     * @param geoIndexerClass the {@link GeoIndexer} {@link Class}.
-     * (not {@code null})
-     */
-    private GeoIndexerType(final Class<? extends GeoIndexer> geoIndexerClass) {
-        this.geoIndexerClass = checkNotNull(geoIndexerClass);
-    }
-
-    /**
-     * @return the {@link GeoIndexer} {@link Class}. (not {@code null})
-     */
-    public Class<? extends GeoIndexer> getGeoIndexerClass() {
-        return geoIndexerClass;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoRyaSailFactory.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoRyaSailFactory.java b/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoRyaSailFactory.java
deleted file mode 100644
index 89933df..0000000
--- a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/GeoRyaSailFactory.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.indexing;
-
-import static java.util.Objects.requireNonNull;
-
-import java.net.UnknownHostException;
-import java.util.Objects;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.hadoop.conf.Configuration;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.mongodb.MongoClient;
-
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.accumulo.AccumuloRyaDAO;
-import org.apache.rya.accumulo.instance.AccumuloRyaInstanceDetailsRepository;
-import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException;
-import org.apache.rya.api.instance.RyaDetailsToConfiguration;
-import org.apache.rya.api.layout.TablePrefixLayoutStrategy;
-import org.apache.rya.api.persist.RyaDAO;
-import org.apache.rya.api.persist.RyaDAOException;
-import org.apache.rya.indexing.accumulo.ConfigUtils;
-import org.apache.rya.mongodb.MongoConnectorFactory;
-import org.apache.rya.mongodb.MongoDBRdfConfiguration;
-import org.apache.rya.mongodb.MongoDBRyaDAO;
-import org.apache.rya.mongodb.instance.MongoRyaInstanceDetailsRepository;
-import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
-import org.apache.rya.rdftriplestore.inference.InferenceEngine;
-import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
-import org.apache.rya.sail.config.RyaSailFactory;
-
-public class GeoRyaSailFactory {
-    private static final Logger LOG = LoggerFactory.getLogger(GeoRyaSailFactory.class);
-
-    /**
-     * Creates an instance of {@link Sail} that is attached to a Rya instance.
-     *
-     * @param conf - Configures how the Sail object will be constructed. (not null)
-     * @return A {@link Sail} object that is backed by a Rya datastore.
-     * @throws SailException The object could not be created.
-     */
-    public static Sail getInstance(final Configuration conf) throws AccumuloException,
-        AccumuloSecurityException, RyaDAOException, InferenceEngineException, SailException {
-        requireNonNull(conf);
-        return getRyaSail(conf);
-    }
-
-    private static Sail getRyaSail(final Configuration config) throws InferenceEngineException, RyaDAOException, AccumuloException, AccumuloSecurityException, SailException {
-        final RdfCloudTripleStore store = new RdfCloudTripleStore();
-        final RyaDAO<?> dao;
-        final RdfCloudTripleStoreConfiguration rdfConfig;
-
-        final String user;
-        final String pswd;
-        // XXX Should(?) be MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX inside the if below. RYA-135
-        final String ryaInstance = config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX);
-        Objects.requireNonNull(ryaInstance, "RyaInstance or table prefix is missing from configuration."+RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX);
-
-        if(ConfigUtils.getUseMongo(config)) {
-            final MongoDBRdfConfiguration mongoConfig = new MongoDBRdfConfiguration(config);
-            rdfConfig = mongoConfig;
-            final MongoClient client = MongoConnectorFactory.getMongoClient(config);
-            try {
-                final MongoRyaInstanceDetailsRepository ryaDetailsRepo = new MongoRyaInstanceDetailsRepository(client, mongoConfig.getCollectionName());
-                RyaDetailsToConfiguration.addRyaDetailsToConfiguration(ryaDetailsRepo.getRyaInstanceDetails(), mongoConfig);
-            } catch (final RyaDetailsRepositoryException e) {
-                LOG.info("Instance does not have a rya details collection, skipping.");
-            }
-            dao = getMongoDAO((MongoDBRdfConfiguration)rdfConfig, client);
-        } else {
-            rdfConfig = new AccumuloRdfConfiguration(config);
-            user = rdfConfig.get(ConfigUtils.CLOUDBASE_USER);
-            pswd = rdfConfig.get(ConfigUtils.CLOUDBASE_PASSWORD);
-            Objects.requireNonNull(user, "Accumulo user name is missing from configuration."+ConfigUtils.CLOUDBASE_USER);
-            Objects.requireNonNull(pswd, "Accumulo user password is missing from configuration."+ConfigUtils.CLOUDBASE_PASSWORD);
-            rdfConfig.setTableLayoutStrategy( new TablePrefixLayoutStrategy(ryaInstance) );
-            RyaSailFactory.updateAccumuloConfig((AccumuloRdfConfiguration) rdfConfig, user, pswd, ryaInstance);
-            dao = getAccumuloDAO((AccumuloRdfConfiguration)rdfConfig);
-        }
-        store.setRyaDAO(dao);
-        rdfConfig.setTablePrefix(ryaInstance);
-
-        if (rdfConfig.isInfer()){
-            final InferenceEngine inferenceEngine = new InferenceEngine();
-            inferenceEngine.setConf(rdfConfig);
-            inferenceEngine.setRyaDAO(dao);
-            inferenceEngine.init();
-            store.setInferenceEngine(inferenceEngine);
-        }
-
-        store.initialize();
-
-        return store;
-    }
-
-    private static MongoDBRyaDAO getMongoDAO(final MongoDBRdfConfiguration config, final MongoClient client) throws RyaDAOException {
-        MongoDBRyaDAO dao = null;
-        OptionalConfigUtils.setIndexers(config);
-        if(client != null) {
-            dao = new MongoDBRyaDAO(config, client);
-        } else {
-            try {
-                dao = new MongoDBRyaDAO(config);
-            } catch (NumberFormatException | UnknownHostException e) {
-                throw new RyaDAOException("Unable to connect to mongo at the configured location.", e);
-            }
-        }
-        dao.init();
-        return dao;
-    }
-
-    private static AccumuloRyaDAO getAccumuloDAO(final AccumuloRdfConfiguration config) throws AccumuloException, AccumuloSecurityException, RyaDAOException {
-        final Connector connector = ConfigUtils.getConnector(config);
-        final AccumuloRyaDAO dao = new AccumuloRyaDAO();
-        dao.setConnector(connector);
-
-        OptionalConfigUtils.setIndexers(config);
-        config.setDisplayQueryPlan(true);
-
-        dao.setConf(config);
-        dao.init();
-        return dao;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9e76b8d7/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/OptionalConfigUtils.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/OptionalConfigUtils.java b/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/OptionalConfigUtils.java
deleted file mode 100644
index 8d4486f..0000000
--- a/extras/rya.geoindexing/src/main/java/org/apache/rya/indexing/OptionalConfigUtils.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.indexing;
-
-import java.util.List;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.rya.api.instance.RyaDetails;
-import org.apache.rya.indexing.accumulo.ConfigUtils;
-import org.apache.rya.indexing.accumulo.geo.GeoMesaGeoIndexer;
-import org.apache.rya.indexing.geotemporal.GeoTemporalOptimizer;
-import org.apache.rya.indexing.geotemporal.mongo.MongoGeoTemporalIndexer;
-import org.apache.rya.indexing.mongodb.geo.MongoGeoIndexer;
-import org.openrdf.model.URI;
-
-import com.google.common.collect.Lists;
-
-/**
- * A set of configuration utils to read a Hadoop {@link Configuration} object and create Cloudbase/Accumulo objects.
- * Soon will deprecate this class.  Use installer for the set methods, use {@link RyaDetails} for the get methods.
- * New code must separate parameters that are set at Rya install time from that which is specific to the client.
- * Also Accumulo index tables are pushed down to the implementation and not configured in conf.
- */
-public class OptionalConfigUtils extends ConfigUtils {
-    private static final Logger logger = Logger.getLogger(OptionalConfigUtils.class);
-
-
-    public static final String GEO_NUM_PARTITIONS = "sc.geo.numPartitions";
-
-    public static final String USE_GEO = "sc.use_geo";
-    public static final String USE_GEOTEMPORAL = "sc.use_geotemporal";
-    public static final String USE_FREETEXT = "sc.use_freetext";
-    public static final String USE_TEMPORAL = "sc.use_temporal";
-    public static final String USE_ENTITY = "sc.use_entity";
-    public static final String USE_PCJ = "sc.use_pcj";
-    public static final String USE_OPTIMAL_PCJ = "sc.use.optimal.pcj";
-    public static final String USE_PCJ_UPDATER_INDEX = "sc.use.updater";
-    public static final String GEO_PREDICATES_LIST = "sc.geo.predicates";
-    public static final String GEO_INDEXER_TYPE = "sc.geo.geo_indexer_type";
-
-    public static Set<URI> getGeoPredicates(final Configuration conf) {
-        return getPredicates(conf, GEO_PREDICATES_LIST);
-    }
-
-    public static int getGeoNumPartitions(final Configuration conf) {
-        return conf.getInt(GEO_NUM_PARTITIONS, getNumPartitions(conf));
-    }
-
-    public static boolean getUseGeo(final Configuration conf) {
-        return conf.getBoolean(USE_GEO, false);
-    }
-
-    public static boolean getUseGeoTemporal(final Configuration conf) {
-        return conf.getBoolean(USE_GEOTEMPORAL, false);
-    }
-
-    /**
-     * Retrieves the value for the geo indexer type from the config.
-     * @param conf the {@link Configuration}.
-     * @return the {@link GeoIndexerType} found in the config or
-     * {@code null} if it doesn't exist.
-     */
-    public static GeoIndexerType getGeoIndexerType(final Configuration conf) {
-        return conf.getEnum(GEO_INDEXER_TYPE, null);
-    }
-
-    public static void setIndexers(final RdfCloudTripleStoreConfiguration conf) {
-        final List<String> indexList = Lists.newArrayList();
-        final List<String> optimizers = Lists.newArrayList();
-
-        boolean useFilterIndex = false;
-        ConfigUtils.setIndexers(conf);
-        final String[] existingIndexers = conf.getStrings(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS);
-        if(existingIndexers != null ) {
-            for (final String index : existingIndexers) {
-                indexList.add(index);
-            }
-            for (final String optimizer : conf.getStrings(RdfCloudTripleStoreConfiguration.CONF_OPTIMIZERS)){
-                optimizers.add(optimizer);
-            }
-        }
-
-        final GeoIndexerType geoIndexerType = getGeoIndexerType(conf);
-
-        if (ConfigUtils.getUseMongo(conf)) {
-            if (getUseGeo(conf)) {
-                if (geoIndexerType == null) {
-                    // Default to MongoGeoIndexer if not specified
-                    indexList.add(MongoGeoIndexer.class.getName());
-                } else {
-                    indexList.add(geoIndexerType.getGeoIndexerClass().getName());
-                }
-                useFilterIndex = true;
-            }
-
-            if (getUseGeoTemporal(conf)) {
-                indexList.add(MongoGeoTemporalIndexer.class.getName());
-                optimizers.add(GeoTemporalOptimizer.class.getName());
-            }
-        } else {
-            if (getUseGeo(conf)) {
-                if (geoIndexerType == null) {
-                    // Default to GeoMesaGeoIndexer if not specified
-                    indexList.add(GeoMesaGeoIndexer.class.getName());
-                } else {
-                    indexList.add(geoIndexerType.getGeoIndexerClass().getName());
-                }
-                useFilterIndex = true;
-            }
-        }
-
-        if (useFilterIndex) {
-            optimizers.remove(FilterFunctionOptimizer.class.getName());
-            optimizers.add(GeoEnabledFilterFunctionOptimizer.class.getName());
-        }
-
-        conf.setStrings(AccumuloRdfConfiguration.CONF_ADDITIONAL_INDEXERS, indexList.toArray(new String[]{}));
-        conf.setStrings(RdfCloudTripleStoreConfiguration.CONF_OPTIMIZERS, optimizers.toArray(new String[]{}));
-    }
-}