You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/05 00:10:12 UTC

[1/3] incubator-rya git commit: RYA-13 Add delete support to secondary indices

Repository: incubator-rya
Updated Branches:
  refs/heads/develop 80faf06d4 -> 990f1ffe2


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
index 8ca96bc..c204f3c 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
@@ -22,16 +22,12 @@ package mvm.rya.indexing.accumulo.geo;
 
 
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
-import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.UUID;
 
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.Assert;
@@ -55,6 +51,10 @@ import com.vividsolutions.jts.geom.Polygon;
 import com.vividsolutions.jts.geom.PrecisionModel;
 import com.vividsolutions.jts.geom.impl.PackedCoordinateSequence;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class GeoIndexerTest {
 
     private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
@@ -89,42 +89,41 @@ public class GeoIndexerTest {
     @Test
     public void testRestrictPredicatesSearch() throws Exception {
         conf.setStrings(ConfigUtils.GEO_PREDICATES_LIST, "pred:1,pred:2");
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        Point point = gf.createPoint(new Coordinate(10, 10));
-        Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        URI invalidPredicate = GeoConstants.GEO_AS_WKT;
+            ValueFactory vf = new ValueFactoryImpl();
 
-        // These should not be stored because they are not in the predicate list
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
+            Point point = gf.createPoint(new Coordinate(10, 10));
+            Value pointValue = vf.createLiteral("Point(10 10)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            URI invalidPredicate = GeoConstants.GEO_AS_WKT;
 
-        URI pred1 = vf.createURI("pred:1");
-        URI pred2 = vf.createURI("pred:2");
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), invalidPredicate, pointValue)));
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), invalidPredicate, pointValue)));
 
-        // These should be stored because they are in the predicate list
-        Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
-        Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
-        f.storeStatement(convertStatement(s3));
-        f.storeStatement(convertStatement(s4));
+            URI pred1 = vf.createURI("pred:1");
+            URI pred2 = vf.createURI("pred:2");
 
-        // This should not be stored because the object is not valid wkt
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
+            // These should be stored because they are in the predicate list
+            Statement s3 = new StatementImpl(vf.createURI("foo:subj3"), pred1, pointValue);
+            Statement s4 = new StatementImpl(vf.createURI("foo:subj4"), pred2, pointValue);
+            f.storeStatement(convertStatement(s3));
+            f.storeStatement(convertStatement(s4));
 
-        // This should not be stored because the object is not a literal
-        f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
+            // This should not be stored because the object is not valid wkt
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj5"), pred1, vf.createLiteral("soint(10 10)"))));
 
-        f.flush();
+            // This should not be stored because the object is not a literal
+            f.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj6"), pred1, vf.createURI("p:Point(10 10)"))));
 
-        Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
-        Assert.assertEquals(2, actual.size());
-        Assert.assertTrue(actual.contains(s3));
-        Assert.assertTrue(actual.contains(s4));
+            f.flush();
 
-        f.close();
+            Set<Statement> actual = getSet(f.queryEquals(point, EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(s3));
+            Assert.assertTrue(actual.contains(s4));
+        }
     }
 
     private static <X> Set<X> getSet(CloseableIteration<X, ?> iter) throws Exception {
@@ -137,234 +136,264 @@ public class GeoIndexerTest {
 
     @Test
     public void testPrimeMeridianSearch() throws Exception {
-        
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
-        double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
-        double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
-
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
-        LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
-        LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
-
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-        Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
-        Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
-
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
-
-        // Test a ring with a hole in it
-        Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
-
-        // test a ring outside the point
-        double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
-        LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
-        Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(0 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] ONE = { 1, 1, -1, 1, -1, -1, 1, -1, 1, 1 };
+            double[] TWO = { 2, 2, -2, 2, -2, -2, 2, -2, 2, 2 };
+            double[] THREE = { 3, 3, -3, 3, -3, -3, 3, -3, 3, 3 };
+
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            LinearRing r2 = gf.createLinearRing(new PackedCoordinateSequence.Double(TWO, 2));
+            LinearRing r3 = gf.createLinearRing(new PackedCoordinateSequence.Double(THREE, 2));
+
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Polygon p2 = gf.createPolygon(r2, new LinearRing[] {});
+            Polygon p3 = gf.createPolygon(r3, new LinearRing[] {});
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p2, EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p3, EMPTY_CONSTRAINTS)));
+
+            // Test a ring with a hole in it
+            Polygon p3m2 = gf.createPolygon(r3, new LinearRing[] { r2 });
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p3m2, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { 3, 3, 1, 3, 1, 1, 3, 1, 3, 3 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
     public void testDcSearch() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-
-        // test a ring outside the point
-        double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
-        LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
-        Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring outside the point
+            double[] OUT = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(OUT, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
-    public void testDcSearchWithContext() throws Exception {
+    public void testDeleteSearch() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            f.deleteStatement(convertStatement(statement));
+
+            // test a ring that the point would be inside of if not deleted
+            double[] in = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(in, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+
+            // test a ring that the point would be outside of if not deleted
+            double[] out = { -77, 39, -76, 39, -76, 38, -77, 38, -77, 39 };
+            LinearRing rOut = gf.createLinearRing(new PackedCoordinateSequence.Double(out, 2));
+            Polygon pOut = gf.createPolygon(rOut, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
+
+            // test a ring for the whole world and make sure the point is gone
+            double[] world = { -180, 90, 180, 90, -180, 90, -180, -90, -180, 90 };
+            LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
+            Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));
+        }
+    }
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
+    @Test
+    public void testDcSearchWithContext() throws Exception {
+        // test a ring around dc
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
 
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
 
-        // query with correct context
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context))));
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        // query with wrong context
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+            // query with correct context
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setContext(context))));
 
-        f.close();
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithSubject() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-        
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-
-        // query with correct subject
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject))));
-
-        // query with wrong subject
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct subject
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryWithin(p1, new StatementContraints().setSubject(subject))));
+
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithSubjectAndContext() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
 
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
 
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        // query with correct context subject
-        Assert.assertEquals(Sets.newHashSet(statement),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject))));
+            // query with correct context subject
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(context).setSubject(subject))));
 
-        // query with wrong context
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
+            // query with wrong context
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setContext(vf.createURI("foo:context2")))));
 
-        // query with wrong subject
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
-
-        f.close();
+            // query with wrong subject
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(p1, new StatementContraints().setSubject(vf.createURI("foo:subj2")))));
+        }
     }
 
     @Test
     public void testDcSearchWithPredicate() throws Exception {
         // test a ring around dc
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource subject = vf.createURI("foo:subj");
-        URI predicate = GeoConstants.GEO_AS_WKT;
-        Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Resource context = vf.createURI("foo:context");
-
-        Statement statement = new ContextStatementImpl(subject, predicate, object, context);
-        f.storeStatement(convertStatement(statement));
-        f.flush();
-
-        double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
-
-        // query with correct Predicate
-        Assert.assertEquals(Sets.newHashSet(statement),
-                getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate)))));
-
-        // query with wrong predicate
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
-
-        f.close();
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource subject = vf.createURI("foo:subj");
+            URI predicate = GeoConstants.GEO_AS_WKT;
+            Value object = vf.createLiteral("Point(-77.03524 38.889468)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Resource context = vf.createURI("foo:context");
+
+            Statement statement = new ContextStatementImpl(subject, predicate, object, context);
+            f.storeStatement(convertStatement(statement));
+            f.flush();
+
+            double[] IN = { -78, 39, -77, 39, -77, 38, -78, 38, -78, 39 };
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(IN, 2));
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+
+            // query with correct Predicate
+            Assert.assertEquals(Sets.newHashSet(statement),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(predicate)))));
+
+            // query with wrong predicate
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryWithin(p1, new StatementContraints().setPredicates(Collections.singleton(vf.createURI("other:pred"))))));
+        }
     }
 
     // @Test
     public void testAntiMeridianSearch() throws Exception {
         // verify that a search works if the bounding box crosses the anti meridian
-        GeoMesaGeoIndexer f = new GeoMesaGeoIndexer();
-        f.setConf(conf);
+        try (GeoMesaGeoIndexer f = new GeoMesaGeoIndexer()) {
+            f.setConf(conf);
 
-        ValueFactory vf = new ValueFactoryImpl();
-        Resource context = vf.createURI("foo:context");
+            ValueFactory vf = new ValueFactoryImpl();
+            Resource context = vf.createURI("foo:context");
 
-        Resource subjectEast = vf.createURI("foo:subj:east");
-        URI predicateEast = GeoConstants.GEO_AS_WKT;
-        Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
-        f.storeStatement(convertStatement(statementEast));
+            Resource subjectEast = vf.createURI("foo:subj:east");
+            URI predicateEast = GeoConstants.GEO_AS_WKT;
+            Value objectEast = vf.createLiteral("Point(179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementEast = new ContextStatementImpl(subjectEast, predicateEast, objectEast, context);
+            f.storeStatement(convertStatement(statementEast));
 
-        Resource subjectWest = vf.createURI("foo:subj:west");
-        URI predicateWest = GeoConstants.GEO_AS_WKT;
-        Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
-        Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
-        f.storeStatement(convertStatement(statementWest));
+            Resource subjectWest = vf.createURI("foo:subj:west");
+            URI predicateWest = GeoConstants.GEO_AS_WKT;
+            Value objectWest = vf.createLiteral("Point(-179 0)", GeoConstants.XMLSCHEMA_OGC_WKT);
+            Statement statementWest = new ContextStatementImpl(subjectWest, predicateWest, objectWest, context);
+            f.storeStatement(convertStatement(statementWest));
 
-        f.flush();
+            f.flush();
 
-        double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
+            double[] ONE = { 178.1, 1, -178, 1, -178, -1, 178.1, -1, 178.1, 1 };
 
-        LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
+            LinearRing r1 = gf.createLinearRing(new PackedCoordinateSequence.Double(ONE, 2));
 
-        Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
+            Polygon p1 = gf.createPolygon(r1, new LinearRing[] {});
 
-        Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
-
-        f.close();
+            Assert.assertEquals(Sets.newHashSet(statementEast, statementWest), getSet(f.queryWithin(p1, EMPTY_CONSTRAINTS)));
+        }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
index 60d237d..1c6628f 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexerTest.java
@@ -1,6 +1,3 @@
-/**
- * 
- */
 package mvm.rya.indexing.accumulo.temporal;
 
 /*
@@ -26,7 +23,6 @@ package mvm.rya.indexing.accumulo.temporal;
 import static mvm.rya.api.resolver.RdfToRyaConversions.convertStatement;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.io.PrintStream;
@@ -42,15 +38,6 @@ import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;
 
-import junit.framework.Assert;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.TemporalInstant;
-import mvm.rya.indexing.TemporalInterval;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Scanner;
@@ -59,6 +46,7 @@ import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.admin.TableOperations;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.security.Authorizations;
 import org.apache.commons.codec.binary.StringUtils;
 import org.apache.commons.io.output.NullOutputStream;
 import org.apache.hadoop.conf.Configuration;
@@ -78,25 +66,34 @@ import org.openrdf.query.QueryEvaluationException;
 
 import com.beust.jcommander.internal.Lists;
 
+import info.aduna.iteration.CloseableIteration;
+import junit.framework.Assert;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.TemporalInstant;
+import mvm.rya.indexing.TemporalInterval;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+
 /**
  * JUnit tests for TemporalIndexer and it's implementation AccumuloTemporalIndexer
- * 
+ *
  * If you enjoy this test, please read RyaTemporalIndexerTest and YagoKBTest, which contain
  * many example SPARQL queries and updates and attempts to test independently of Accumulo:
- * 
+ *
  *     extras/indexingSail/src/test/java/mvm/rya/indexing/accumulo/RyaTemporalIndexerTest.java
  *     {@link mvm.rya.indexing.accumulo.RyaTemporalIndexerTest}
  *     {@link mvm.rya.indexing.accumulo.YagoKBTest.java}
- *     
+ *
  * Remember, this class in instantiated fresh for each @test method.
  * so fields are reset, unless they are static.
- * 
+ *
  * These are covered:
- *   Instance {before, equals, after} given Instance 
+ *   Instance {before, equals, after} given Instance
  *   Instance {before, after, inside} given Interval
  *   Instance {hasBeginning, hasEnd} given Interval
  * And a few more.
- * 
+ *
  */
 public final class AccumuloTemporalIndexerTest {
     // Configuration properties, this is reset per test in setup.
@@ -268,7 +265,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Test method for {@link AccumuloTemporalIndexer#TemporalIndexerImpl(org.apache.hadoop.conf.Configuration)} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -283,7 +280,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Test method for {@link AccumuloTemporalIndexer#storeStatement(convertStatement(org.openrdf.model.Statement)}
-     * 
+     *
      * @throws NoSuchAlgorithmException
      */
     @Test
@@ -339,7 +336,41 @@ public final class AccumuloTemporalIndexerTest {
         Assert.assertEquals("Number of rows stored.", rowsStoredExpected*4, rowsStoredActual); // 4 index entries per statement
 
     }
-    
+
+    @Test
+    public void testDelete() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
+        // count rows expected to store:
+        int rowsStoredExpected = 0;
+
+        ValueFactory vf = new ValueFactoryImpl();
+
+        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
+        URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA);
+
+        final String testDate2014InBRST = "2014-12-31T23:59:59-02:00";
+        final String testDate2016InET = "2016-12-31T20:59:59-05:00";
+
+        // These should be stored because they are in the predicate list.
+        // BUT they will get converted to the same exact datetime in UTC.
+        Statement s1 = new StatementImpl(vf.createURI("foo:subj3"), pred1_atTime, vf.createLiteral(testDate2014InBRST));
+        Statement s2 = new StatementImpl(vf.createURI("foo:subj4"), pred2_circa, vf.createLiteral(testDate2016InET));
+        tIndexer.storeStatement(convertStatement(s1));
+        rowsStoredExpected++;
+        tIndexer.storeStatement(convertStatement(s2));
+        rowsStoredExpected++;
+
+        tIndexer.flush();
+
+        int rowsStoredActual = printTables("junit testing: Temporal entities stored in testDelete before delete", System.out, null);
+        Assert.assertEquals("Number of rows stored.", rowsStoredExpected*4, rowsStoredActual); // 4 index entries per statement
+
+        tIndexer.deleteStatement(convertStatement(s1));
+        tIndexer.deleteStatement(convertStatement(s2));
+
+        int afterDeleteRowsStoredActual = printTables("junit testing: Temporal entities stored in testDelete after delete", System.out, null);
+        Assert.assertEquals("Number of rows stored after delete.", 0, afterDeleteRowsStoredActual);
+    }
+
     @Test
     public void testStoreStatementWithInterestingLiterals() throws Exception {
         ValueFactory vf = new ValueFactoryImpl();
@@ -356,36 +387,35 @@ public final class AccumuloTemporalIndexerTest {
     }
 
     /**
-	     * Test method for {@link AccumuloTemporalIndexer#storeStatement(convertStatement(org.openrdf.model.Statement)}
-	     * 
-	     * @throws NoSuchAlgorithmException
-	     */
-	    @Test
-	    public void testStoreStatementBadInterval() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
-	        // count rows expected to store:
-	        int rowsStoredExpected = 0;
-	
-	        ValueFactory vf = new ValueFactoryImpl();
-	        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
-	
-	        // Test: Should not store an improper date interval, and log a warning (log warning not tested).
-	        final String invalidDateIntervalString="[bad,interval]";
-			// Silently logs a warning for bad dates.
-            tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), pred1_atTime, vf.createLiteral(invalidDateIntervalString))));
-
-	        final String validDateIntervalString="[2016-12-31T20:59:59-05:00,2016-12-31T21:00:00-05:00]";
-            tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(validDateIntervalString))));
-            rowsStoredExpected++;
-            
-	        tIndexer.flush();
-	
-	        int rowsStoredActual = printTables("junit testing: Temporal intervals stored in testStoreStatement", null, null);
-	        Assert.assertEquals("Only good intervals should be stored.", rowsStoredExpected*2, rowsStoredActual); // 2 index entries per interval statement
-	    }
-
-	@Test
-    public void testStoreStatementsSameTime() throws IOException, NoSuchAlgorithmException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException
-    {
+     * Test method for {@link AccumuloTemporalIndexer#storeStatement(RyaStatement)}
+     *
+     * @throws NoSuchAlgorithmException
+     */
+    @Test
+    public void testStoreStatementBadInterval() throws IOException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
+        // count rows expected to store:
+        int rowsStoredExpected = 0;
+
+        ValueFactory vf = new ValueFactoryImpl();
+        URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
+
+        // Test: Should not store an improper date interval, and log a warning (log warning not tested).
+        final String invalidDateIntervalString="[bad,interval]";
+        // Silently logs a warning for bad dates.
+        tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj1"), pred1_atTime, vf.createLiteral(invalidDateIntervalString))));
+
+        final String validDateIntervalString="[2016-12-31T20:59:59-05:00,2016-12-31T21:00:00-05:00]";
+        tIndexer.storeStatement(convertStatement(new StatementImpl(vf.createURI("foo:subj2"), pred1_atTime, vf.createLiteral(validDateIntervalString))));
+        rowsStoredExpected++;
+
+        tIndexer.flush();
+
+        int rowsStoredActual = printTables("junit testing: Temporal intervals stored in testStoreStatement", null, null);
+        Assert.assertEquals("Only good intervals should be stored.", rowsStoredExpected*2, rowsStoredActual); // 2 index entries per interval statement
+    }
+
+    @Test
+    public void testStoreStatementsSameTime() throws IOException, NoSuchAlgorithmException, AccumuloException, AccumuloSecurityException, TableNotFoundException, TableExistsException {
         ValueFactory vf = new ValueFactoryImpl();
         URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
         URI pred2_circa = vf.createURI(URI_PROPERTY_CIRCA);
@@ -416,7 +446,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Test method for {@link AccumuloTemporalIndexer#storeStatements(java.util.Collection)} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -482,9 +512,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test instant equal to a given instant.
      * From the series: instant {equal, before, after} instant
-     * @throws AccumuloSecurityException 
-     * @throws AccumuloException 
-     * @throws TableNotFoundException 
+     * @throws AccumuloSecurityException
+     * @throws AccumuloException
+     * @throws TableNotFoundException
      */
     @Test
     public void testQueryInstantEqualsInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException {
@@ -525,9 +555,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test instant after a given instant.
      * From the series: instant {equal, before, after} instant
-     * @throws AccumuloSecurityException 
-     * @throws AccumuloException 
-     * @throws TableNotFoundException 
+     * @throws AccumuloSecurityException
+     * @throws AccumuloException
+     * @throws TableNotFoundException
      */
     @Test
     public void testQueryInstantAfterInstant() throws IOException, QueryEvaluationException, TableNotFoundException, AccumuloException, AccumuloSecurityException {
@@ -579,7 +609,7 @@ public final class AccumuloTemporalIndexerTest {
         }
         tIndexer.flush();
         CloseableIteration<Statement, QueryEvaluationException> iter;
-        
+
         iter = tIndexer.queryInstantBeforeInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS);
         int count = 0;
         while (iter.hasNext()) {
@@ -763,9 +793,9 @@ public final class AccumuloTemporalIndexerTest {
      * Test method for
      * {@link mvm.rya.indexing.accumulo.temporal.AccumuloTemporalIndexer#queryIntervalEquals(TemporalInterval, StatementContraints)}
      * .
-     * @throws IOException 
-     * @throws QueryEvaluationException 
-     * 
+     * @throws IOException
+     * @throws QueryEvaluationException
+     *
      */
     @Test
     public void testQueryIntervalEquals() throws IOException, QueryEvaluationException {
@@ -789,9 +819,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Test interval before a given interval, for method:
      * {@link AccumuloTemporalIndexer#queryIntervalBefore(TemporalInterval, StatementContraints)}.
-     * 
-     * @throws IOException 
-     * @throws QueryEvaluationException 
+     *
+     * @throws IOException
+     * @throws QueryEvaluationException
      */
     @Test
     public void testQueryIntervalBefore() throws IOException, QueryEvaluationException {
@@ -804,7 +834,7 @@ public final class AccumuloTemporalIndexerTest {
         // instants should be ignored.
         tIndexer.storeStatement(convertStatement(spo_B30_E32));
         tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
-        tIndexer.storeStatement(convertStatement(seriesSpo[2])); 
+        tIndexer.storeStatement(convertStatement(seriesSpo[2]));
         tIndexer.storeStatement(convertStatement(seriesSpo[31]));
         tIndexer.flush();
 
@@ -819,9 +849,9 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * interval is after the given interval.  Find interval beginnings after the endings of the given interval.
      * {@link AccumuloTemporalIndexer#queryIntervalAfter(TemporalInterval, StatementContraints).
-     * 
-     * @throws IOException 
-     * @throws QueryEvaluationException 
+     *
+     * @throws IOException
+     * @throws QueryEvaluationException
      */
     @Test
     public void testQueryIntervalAfter() throws IOException, QueryEvaluationException {
@@ -837,7 +867,7 @@ public final class AccumuloTemporalIndexerTest {
         // instants should be ignored.
         tIndexer.storeStatement(convertStatement(spo_B02));
         tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
-        tIndexer.storeStatement(convertStatement(seriesSpo[2])); 
+        tIndexer.storeStatement(convertStatement(seriesSpo[2]));
         tIndexer.storeStatement(convertStatement(seriesSpo[31]));
         tIndexer.flush();
 
@@ -874,7 +904,7 @@ public final class AccumuloTemporalIndexerTest {
         URI pred3_CIRCA_ = vf.createURI(URI_PROPERTY_CIRCA);  // this one to ignore.
         URI pred2_eventTime = vf.createURI(URI_PROPERTY_EVENT_TIME);
         URI pred1_atTime = vf.createURI(URI_PROPERTY_AT_TIME);
-        
+
         // add the predicate = EventTime ; Store in an array for verification.
         Statement[] SeriesTs_EventTime = new Statement[expectedResultCount+1];
         for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
@@ -892,7 +922,7 @@ public final class AccumuloTemporalIndexerTest {
         CloseableIteration<Statement, QueryEvaluationException> iter;
         StatementContraints constraints = new StatementContraints();
         constraints.setPredicates(new HashSet<URI>(Arrays.asList( pred2_eventTime,  pred1_atTime )));
-         
+
         iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], constraints); // EMPTY_CONSTRAINTS);//
         int count_AtTime = 0;
         int count_EventTime = 0;
@@ -910,17 +940,17 @@ public final class AccumuloTemporalIndexerTest {
             } else {
                 assertTrue("This predicate should not be returned: "+s, false);
             }
-                
+
         }
-        
+
         Assert.assertEquals("Should find count of atTime    rows.", expectedResultCount, count_AtTime);
         Assert.assertEquals("Should find count of eventTime rows.", expectedResultCount, count_EventTime);
     }
 
-    
+
     /**
      * Test method for {@link AccumuloTemporalIndexer#getIndexablePredicates()} .
-     * 
+     *
      * @throws TableExistsException
      * @throws TableNotFoundException
      * @throws AccumuloSecurityException
@@ -936,7 +966,7 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Count all the entries in the temporal index table, return the count.
      * Uses printTables for reliability.
-     * 
+     *
      */
     public int countAllRowsInTable() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, NoSuchAlgorithmException {
         return printTables("Counting rows.", null, null);
@@ -944,7 +974,7 @@ public final class AccumuloTemporalIndexerTest {
 
     /**
      * Print and gather statistics on the entire index table.
-     * 
+     *
      * @param description
      *            Printed to the console to find the test case.
      * @param out
@@ -967,19 +997,18 @@ public final class AccumuloTemporalIndexerTest {
         out.println("Reading : " + this.uniquePerTestTemporalIndexTableName);
         out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--");
 
-        Scanner s = ConfigUtils.getConnector(conf).createScanner(this.uniquePerTestTemporalIndexTableName, Constants.NO_AUTHS);
+        Scanner s = ConfigUtils.getConnector(conf).createScanner(this.uniquePerTestTemporalIndexTableName, Authorizations.EMPTY);
         for (Entry<Key, org.apache.accumulo.core.data.Value> entry : s) {
             rowsPrinted++;
             Key k = entry.getKey();
-            out.format(FORMAT, toHumanString(k.getRow()), 
-            		toHumanString(k.getColumnFamily()), 
-            		toHumanString(k.getColumnQualifier()), 
-            		toHumanString(entry.getValue()));
+            out.format(FORMAT, toHumanString(k.getRow()),
+                    toHumanString(k.getColumnFamily()),
+                    toHumanString(k.getColumnQualifier()),
+                    toHumanString(entry.getValue()));
             keyHasher = hasher(keyHasher, (StringUtils.getBytesUtf8(entry.getKey().toStringNoTime())));
             valueHasher = hasher(valueHasher, (entry.getValue().get()));
         }
         out.println();
-        // }
 
         if (statistics != null) {
             statistics.put(STAT_COUNT, (long) rowsPrinted);
@@ -994,7 +1023,7 @@ public final class AccumuloTemporalIndexerTest {
     /**
      * Order independent hashcode.
      * Read more: http://stackoverflow.com/questions/18021643/hashing-a-set-of-integers-in-an-order-independent-way
-     * 
+     *
      * @param hashcode
      * @param list
      * @return
@@ -1013,28 +1042,28 @@ public final class AccumuloTemporalIndexerTest {
      * @param value
      * @return Human readable representation.
      */
-	static String toHumanString(Value value) {
-		return toHumanString(value==null?null:value.get());
-	}
-	static String toHumanString(Text text) {
-		return toHumanString(text==null?null:text.copyBytes());
-	}
-	static String toHumanString(byte[] bytes) {
-		if (bytes==null) 
-			return "{null}";
-		StringBuilder sb = new StringBuilder();
-		for (byte b : bytes) {
-			if ((b > 0x7e) || (b < 32)) {
-				sb.append("{");
-				sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones.
-				sb.append("}");
-			} else if (b == '{'||b == '}') { // Escape the literal braces.
-				sb.append("{");
-				sb.append((char)b);
-				sb.append("}");
-			} else
-				sb.append((char)b);
-		}
-		return sb.toString();
-	}
+    static String toHumanString(Value value) {
+        return toHumanString(value==null?null:value.get());
+    }
+    static String toHumanString(Text text) {
+        return toHumanString(text==null?null:text.copyBytes());
+    }
+    static String toHumanString(byte[] bytes) {
+        if (bytes==null)
+            return "{null}";
+        StringBuilder sb = new StringBuilder();
+        for (byte b : bytes) {
+            if ((b > 0x7e) || (b < 32)) {
+                sb.append("{");
+                sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones.
+                sb.append("}");
+            } else if (b == '{'||b == '}') { // Escape the literal braces.
+                sb.append("{");
+                sb.append((char)b);
+                sb.append("}");
+            } else
+                sb.append((char)b);
+        }
+        return sb.toString();
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexingExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/EntityDirectExample.java b/extras/indexingExample/src/main/java/EntityDirectExample.java
index ae83520..04b4f74 100644
--- a/extras/indexingExample/src/main/java/EntityDirectExample.java
+++ b/extras/indexingExample/src/main/java/EntityDirectExample.java
@@ -21,11 +21,6 @@
 
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.TableNotFoundException;
@@ -47,6 +42,11 @@ import org.openrdf.repository.sail.SailRepository;
 import org.openrdf.repository.sail.SailRepositoryConnection;
 import org.openrdf.sail.Sail;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class EntityDirectExample {
     private static final Logger log = Logger.getLogger(EntityDirectExample.class);
 
@@ -59,18 +59,18 @@ public class EntityDirectExample {
     private static final String INSTANCE = "instance";
     private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
     private static final String AUTHS = "U";
-    
+
     public static void main(String[] args) throws Exception {
         Configuration conf = getConf();
         conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
+
         log.info("Creating the tables as root.");
         SailRepository repository = null;
         SailRepositoryConnection conn = null;
-      
+
         try {
             log.info("Connecting to Indexing Sail Repository.");
-            
+
             Sail extSail = RyaSailFactory.getInstance(conf);
             repository = new SailRepository(extSail);
             repository.initialize();
@@ -80,7 +80,7 @@ public class EntityDirectExample {
             testAddAndDelete(conn);
             log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
             testAddAndTemporalSearchWithPCJ(conn);
-            
+
         } finally {
             log.info("Shutting down");
             closeQuietly(conn);
@@ -108,10 +108,10 @@ public class EntityDirectExample {
         }
     }
 
-    
 
 
-   
+
+
     public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
             RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
             AccumuloException, AccumuloSecurityException, TableNotFoundException {
@@ -127,7 +127,7 @@ public class EntityDirectExample {
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
         update.execute();
-        
+
         query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
                 + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
         CountingResultHandler resultHandler = new CountingResultHandler();
@@ -138,29 +138,28 @@ public class EntityDirectExample {
         Validate.isTrue(resultHandler.getCount() == 1);
         resultHandler.resetCount();
 
-        //TODO delete currently not implemented in AccumuloRyaDAO for 
-//        // Delete Data
-//        query = "DELETE DATA\n" //
-//                + "{ GRAPH <http://updated/test> {\n"
-//                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-//                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-//
-//        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-//        update.execute();
-//
-//        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
-//                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
-//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-//        tupleQuery.evaluate(resultHandler);
-//        log.info("Result count : " + resultHandler.getCount());
-//
-//        Validate.isTrue(resultHandler.getCount() == 0);
+        // Delete Data
+        query = "DELETE DATA\n" //
+                + "{ GRAPH <http://updated/test> {\n"
+                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+
+        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+        query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
+                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        Validate.isTrue(resultHandler.getCount() == 0);
     }
-    
-    
 
-    
-    
+
+
+
+
     private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
 
         // create some resources and literals to make statements out of
@@ -178,7 +177,7 @@ public class EntityDirectExample {
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
         update.execute();
-        
+
         String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
                 + "SELECT ?x ?z \n" //
                 + "WHERE { \n"
@@ -187,8 +186,8 @@ public class EntityDirectExample {
                 + "  ?x pref:hasProperty2 'property2' . \n"//
                 + "  ?x pref:hasProperty3 'property3' . \n"//
                 + "}";//
-       
-        
+
+
 
         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         CountingResultHandler tupleHandler = new CountingResultHandler();
@@ -196,7 +195,7 @@ public class EntityDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 2);
-        
+
         queryString = "PREFIX pref: <http://www.model/pref#> \n" //
                 + "SELECT ?x ?w ?z \n" //
                 + "WHERE { \n"
@@ -204,29 +203,29 @@ public class EntityDirectExample {
                 + "  ?x pref:hasProperty4 'property4' . \n"//
                 + "  ?x pref:hasProperty5 ?w . \n"//
                 + "}";//
-       
-        
+
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 3);
-        
-        
-        queryString = "PREFIX pref: <http://www.model/pref#> " 
-                + "SELECT ?v ?w ?x ?y ?z " 
-                + "WHERE { " 
-                + "  ?w a ?z  . " 
-                + "  ?w pref:hasProperty1 ?v . " 
-                + "  ?w pref:hasProperty2 'property2' . " 
-                + "  ?w pref:hasProperty3 'property3' . " 
+
+
+        queryString = "PREFIX pref: <http://www.model/pref#> "
+                + "SELECT ?v ?w ?x ?y ?z "
+                + "WHERE { "
+                + "  ?w a ?z  . "
+                + "  ?w pref:hasProperty1 ?v . "
+                + "  ?w pref:hasProperty2 'property2' . "
+                + "  ?w pref:hasProperty3 'property3' . "
                 + "  ?x a ?z  . "
-                + "  ?x pref:hasProperty4 'property4' . " 
-                + "  ?x pref:hasProperty5 ?y . " 
+                + "  ?x pref:hasProperty4 'property4' . "
+                + "  ?x pref:hasProperty5 ?y . "
                 + "}";
-       
-        
+
+
 
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
@@ -234,10 +233,10 @@ public class EntityDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
         Validate.isTrue(tupleHandler.getBsSize() == 5);
-        
+
     }
-    
-    
+
+
     private static Configuration getConf() {
 
         AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
@@ -254,7 +253,7 @@ public class EntityDirectExample {
 
         return conf;
     }
-    
+
 
     private static class CountingResultHandler implements TupleQueryResultHandler {
         private int count = 0;
@@ -264,11 +263,11 @@ public class EntityDirectExample {
         public int getCount() {
             return count;
         }
-        
+
         public int getBsSize() {
             return bindingSize;
         }
-        
+
         public void resetBsSize() {
             bindingSize = 0;
             bsSizeSet = false;
@@ -298,14 +297,10 @@ public class EntityDirectExample {
 
         @Override
         public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
 
         @Override
         public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
index b3e8dae..0d2df3f 100644
--- a/extras/indexingExample/src/main/java/RyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/RyaDirectExample.java
@@ -20,13 +20,6 @@
 
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.geo.GeoConstants;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
@@ -60,6 +53,13 @@ import org.openrdf.repository.sail.SailRepositoryConnection;
 import org.openrdf.sail.Sail;
 import org.openrdf.sail.SailException;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
+
 public class RyaDirectExample {
     private static final Logger log = Logger.getLogger(RyaDirectExample.class);
 
@@ -72,27 +72,27 @@ public class RyaDirectExample {
     private static final String INSTANCE = "instance";
     private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
     private static final String AUTHS = "";
-    
-    
-    
+
+
+
     public static void main(String[] args) throws Exception {
         Configuration conf = getConf();
         conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
+
         log.info("Creating the tables as root.");
 //        createTables(addRootConf(conf), conf);
 
         SailRepository repository = null;
         SailRepositoryConnection conn = null;
-      
+
         try {
             log.info("Connecting to Indexing Sail Repository.");
-            
+
             Sail extSail = RyaSailFactory.getInstance(conf);
             repository = new SailRepository(extSail);
             repository.initialize();
             conn = repository.getConnection();
-            
+
             createPCJ(conn);
 
             long start = System.currentTimeMillis();
@@ -110,6 +110,12 @@ public class RyaDirectExample {
             testTemporalFreeGeoSearch(conn);
             log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
             testGeoFreetextWithPCJSearch(conn);
+            log.info("Running SPARQL Example: Delete Temporal Data");
+            testDeleteTemporalData(conn);
+            log.info("Running SPARQL Example: Delete Free Text Data");
+            testDeleteFreeTextData(conn);
+            log.info("Running SPARQL Example: Delete Geo Data");
+            testDeleteGeoData(conn);
 
             log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
         } finally {
@@ -201,10 +207,10 @@ public class RyaDirectExample {
 
         Validate.isTrue(resultHandler.getCount() == 0);
     }
-    
-    
+
+
     private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -222,7 +228,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-           
+
      // ///////////// search for bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?e ?c ?l ?o " //
@@ -236,13 +242,13 @@ public class RyaDirectExample {
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);    
-        
+        Validate.isTrue(tupleHandler.getCount() == 2);
+
     }
-    
 
-    
-    
+
+
+
     private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
 
         // create some resources and literals to make statements out of
@@ -254,15 +260,16 @@ public class RyaDirectExample {
                 + "     time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" //   2 seconds
                 + "     time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" //   3 seconds
                 + "     time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" //   4 seconds
-                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"   
-                + "     time:inXSDDateTime '2006-01-01' ;\n" 
-                + "     time:inXSDDateTime '2007-01-01' ;\n" 
+                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"
+                + "     time:inXSDDateTime '2006-01-01' ;\n"
+                + "     time:inXSDDateTime '2007-01-01' ;\n"
                 + "     time:inXSDDateTime '2008-01-01' ; .\n"
                 + "}";
 
         Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
         update.execute();
 
+
         // Find all stored dates.
         String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
                 + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
@@ -271,15 +278,15 @@ public class RyaDirectExample {
                 + "  ?event time:inXSDDateTime ?time . \n"//
                 + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
                 + "}";//
-       
-        
+
+
 
         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         CountingResultHandler tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 5);
-        
+
         // Find all stored dates.
         queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
                 + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
@@ -320,10 +327,6 @@ public class RyaDirectExample {
     }
 
 
-
-
-
-
     private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
         // add data to the repository using the SailRepository add methods
         ValueFactory f = conn.getValueFactory();
@@ -338,7 +341,7 @@ public class RyaDirectExample {
         uuid = "urn:people:bobss";
         conn.add(f.createURI(uuid), RDF.TYPE, person);
         conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -355,7 +358,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
+
 
         // ///////////// search for alice and bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
@@ -370,7 +373,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 2);
-        
+
      // ///////////// search for alice and bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?person ?match " //
@@ -385,8 +388,8 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
-        
+
+
         // ///////////// search for bob
         queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
                 + "SELECT ?person ?match ?e ?c ?l ?o " //
@@ -421,11 +424,11 @@ public class RyaDirectExample {
 
         Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
         u.execute();
-        
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
-        
+
         // point outside search ring
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -442,7 +445,8 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 0);
-        
+
+
         // point inside search ring
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -457,14 +461,14 @@ public class RyaDirectExample {
                 + "  ?point geo:asWKT ?wkt . "//
                 + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
                 + "}";//
-         
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
-        
-             
+
+
         // point inside search ring with Pre-Computed Join
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -479,7 +483,7 @@ public class RyaDirectExample {
                 + "  ?point geo:asWKT ?wkt . "//
                 + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
                 + "}";//
-         
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
@@ -505,7 +509,7 @@ public class RyaDirectExample {
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 0);
-        
+
         // point inside search ring with different Pre-Computed Join
         queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
                 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
@@ -526,12 +530,12 @@ public class RyaDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
     }
-    
-    
-    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
+
+
+    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException,
     RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
-        
-        
+
+
         String queryString;
         TupleQuery tupleQuery;
         CountingResultHandler tupleHandler;
@@ -556,21 +560,21 @@ public class RyaDirectExample {
                 + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
                 + "  FILTER(fts:text(?match, \"pal*\")) " //
                 + "}";//
-        
-        
-        
+
+
+
         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
 
         tupleHandler = new CountingResultHandler();
         tupleQuery.evaluate(tupleHandler);
         log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5); 
-        
+        Validate.isTrue(tupleHandler.getCount() == 5);
+
     }
-    
-    
-    
-    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
+
+
+
+    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException,
     RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
      // ring outside point
         String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
@@ -596,12 +600,122 @@ public class RyaDirectExample {
         log.info("Result count : " + tupleHandler.getCount());
         Validate.isTrue(tupleHandler.getCount() == 1);
     }
-    
-    
-    
-    private static void createPCJ(SailRepositoryConnection conn) 
+
+
+    private static void testDeleteTemporalData(SailRepositoryConnection conn) throws Exception {
+        // Delete all stored dates
+        String sparqlDelete = "PREFIX time: <http://www.w3.org/2006/time#>\n"
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "DELETE {\n" //
+                + "  ?event time:inXSDDateTime ?time . \n"
+                + "}\n"
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "}";//
+
+        Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete);
+        deleteUpdate.execute();
+
+
+        // Find all stored dates.
+        String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+                + "SELECT ?event ?time \n" //
+                + "WHERE { \n"
+                + "  ?event time:inXSDDateTime ?time . \n"//
+                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+                + "}";//
+
+
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void testDeleteFreeTextData(SailRepositoryConnection conn) throws Exception {
+        // Delete data from the repository using the SailRepository remove methods
+        ValueFactory f = conn.getValueFactory();
+        URI person = f.createURI("http://example.org/ontology/Person");
+
+        String uuid;
+
+        uuid = "urn:people:alice";
+        conn.remove(f.createURI(uuid), RDF.TYPE, person);
+        conn.remove(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
+
+        uuid = "urn:people:bobss";
+        conn.remove(f.createURI(uuid), RDF.TYPE, person);
+        conn.remove(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
+
+        conn.remove(person, RDFS.LABEL, f.createLiteral("label", "en"));
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // Find all
+        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
+                + "SELECT ?person ?match " //
+                + "{" //
+                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+                + "  ?person a <http://example.org/ontology/Person> . "//
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void testDeleteGeoData(SailRepositoryConnection conn) throws Exception {
+        // Delete all stored points
+        String sparqlDelete = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "DELETE {\n" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}\n"
+                + "WHERE { \n"
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}";//
+
+        Update deleteUpdate = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlDelete);
+        deleteUpdate.execute();
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // Find all stored points
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+
+    private static void createPCJ(SailRepositoryConnection conn)
             throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
-        
+
         String queryString1 = ""//
                 + "SELECT ?e ?c ?l ?o " //
                 + "{" //
@@ -609,7 +723,7 @@ public class RyaDirectExample {
                 + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                 + "  ?e <uri:talksTo> ?o . "//
                 + "}";//
-        
+
         String queryString2 = ""//
                 + "SELECT ?e ?c ?l ?o " //
                 + "{" //
@@ -617,8 +731,8 @@ public class RyaDirectExample {
                 + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
                 + "  ?e <uri:talksTo> ?o . "//
                 + "}";//
-        
-        
+
+
         URI obj,subclass,talksTo;
         URI person = new URIImpl("urn:people:alice");
         URI feature = new URIImpl("urn:feature");
@@ -632,33 +746,24 @@ public class RyaDirectExample {
         conn.add(sub, RDF.TYPE, subclass);
         conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
         conn.add(sub, talksTo, obj);
-       
-        AccumuloIndexSet ais1 = null; 
-        AccumuloIndexSet ais2 = null; 
+
+        AccumuloIndexSet ais1 = null;
+        AccumuloIndexSet ais2 = null;
         String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
         String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
 
         Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
         accCon.tableOperations().create(tablename1);
         accCon.tableOperations().create(tablename2);
-        
+
         try {
             ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
             ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
+        } catch (MalformedQueryException | SailException | QueryEvaluationException | MutationsRejectedException | TableNotFoundException e) {
+            log.error("Error creating Accumulo Index", e);
         }
-        
     }
-    
+
 
     private static class CountingResultHandler implements TupleQueryResultHandler {
         private int count = 0;
@@ -687,14 +792,10 @@ public class RyaDirectExample {
 
         @Override
         public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
 
         @Override
         public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
         }
     }
 }


[2/3] incubator-rya git commit: RYA-13 Add delete support to secondary indices

Posted by mi...@apache.org.
RYA-13 Add delete support to secondary indices


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/e5e227c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/e5e227c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/e5e227c1

Branch: refs/heads/develop
Commit: e5e227c159fdcdb3ccc05af0049b35f78aa4831e
Parents: 80faf06
Author: ejwhite922 <er...@sparta.com>
Authored: Fri Dec 4 16:35:23 2015 -0500
Committer: ejwhite922 <er...@sparta.com>
Committed: Fri Dec 4 16:35:23 2015 -0500

----------------------------------------------------------------------
 .../java/mvm/rya/accumulo/AccumuloRyaDAO.java   |  81 ++--
 .../accumulo/entity/EntityCentricIndex.java     |  64 ++-
 .../freetext/AccumuloFreeTextIndexer.java       | 238 +++++++---
 .../accumulo/geo/GeoMesaGeoIndexer.java         |  90 ++--
 .../temporal/AccumuloTemporalIndexer.java       | 216 ++++++---
 .../freetext/AccumuloFreeTextIndexerTest.java   | 239 ++++++----
 .../indexing/accumulo/geo/GeoIndexerTest.java   | 451 ++++++++++---------
 .../temporal/AccumuloTemporalIndexerTest.java   | 249 +++++-----
 .../src/main/java/EntityDirectExample.java      | 121 +++--
 .../src/main/java/RyaDirectExample.java         | 277 ++++++++----
 10 files changed, 1212 insertions(+), 814 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
index 764ca80..84fae68 100644
--- a/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
+++ b/dao/accumulo.rya/src/main/java/mvm/rya/accumulo/AccumuloRyaDAO.java
@@ -31,7 +31,6 @@ import static mvm.rya.api.RdfCloudTripleStoreConstants.NUM_THREADS;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_SUBJECT_RYA;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.RTS_VERSION_PREDICATE_RYA;
 import static mvm.rya.api.RdfCloudTripleStoreConstants.VERSION_RYA;
-import info.aduna.iteration.CloseableIteration;
 
 import java.util.Collection;
 import java.util.Collections;
@@ -40,21 +39,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.layout.TableLayoutStrategy;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.RyaNamespaceManager;
-import mvm.rya.api.resolver.RyaTripleContext;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.api.resolver.triple.TripleRowResolverException;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchDeleter;
@@ -80,6 +64,21 @@ import org.openrdf.model.Namespace;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AccumuloIndexer;
+import mvm.rya.accumulo.query.AccumuloRyaQueryEngine;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.layout.TableLayoutStrategy;
+import mvm.rya.api.persist.RyaDAO;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.api.persist.RyaNamespaceManager;
+import mvm.rya.api.resolver.RyaTripleContext;
+import mvm.rya.api.resolver.triple.TripleRow;
+import mvm.rya.api.resolver.triple.TripleRowResolverException;
+
 /**
  * Class AccumuloRyaDAO
  * Date: Feb 29, 2012
@@ -102,7 +101,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     private BatchWriter bw_ns;
 
     private List<AccumuloIndexer> secondaryIndexers;
-    
+
     private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
     private RyaTableMutationsFactory ryaTableMutationsFactory;
     private TableLayoutStrategy tableLayoutStrategy;
@@ -132,15 +131,15 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             tableLayoutStrategy = conf.getTableLayoutStrategy();
             ryaContext = RyaTripleContext.getInstance(conf);
             ryaTableMutationsFactory = new RyaTableMutationsFactory(ryaContext);
-            
+
             secondaryIndexers = conf.getAdditionalIndexers();
-            
+
             TableOperations tableOperations = connector.tableOperations();
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getSpo());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getPo());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getOsp());
             AccumuloRdfUtils.createTableIfNotExist(tableOperations, tableLayoutStrategy.getNs());
-            
+
             for (AccumuloIndexer index : secondaryIndexers) {
                 index.setConf(conf);
             }
@@ -154,7 +153,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
 
             bw_ns = connector.createBatchWriter(tableLayoutStrategy.getNs(), MAX_MEMORY,
                     MAX_TIME, 1);
-            
+
             for (AccumuloIndexer index : secondaryIndexers) {
                 index.setMultiTableBatchWriter(mt_bw);
             }
@@ -169,7 +168,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         }
     }
 
-    public String getVersion() throws RyaDAOException {
+    @Override
+	public String getVersion() throws RyaDAOException {
         String version = null;
         CloseableIteration<RyaStatement, RyaDAOException> versIter = queryEngine.query(new RyaStatement(RTS_SUBJECT_RYA, RTS_VERSION_PREDICATE_RYA, null), conf);
         if (versIter.hasNext()) {
@@ -206,6 +206,10 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 while (query.hasNext()) {
                     deleteSingleRyaStatement(query.next());
                 }
+
+                for (AccumuloIndexer index : secondaryIndexers) {
+                    index.deleteStatement(stmt);
+                }
             }
             mt_bw.flush();
             //TODO currently all indexers do not support delete
@@ -213,7 +217,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             throw new RyaDAOException(e);
         }
     }
-    
+
     @Override
     public void dropGraph(AccumuloRdfConfiguration conf, RyaURI... graphs) throws RyaDAOException {
         BatchDeleter bd_spo = null;
@@ -234,16 +238,16 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 bd_po.fetchColumnFamily(new Text(graph.getData()));
                 bd_osp.fetchColumnFamily(new Text(graph.getData()));
             }
-            
+
             bd_spo.delete();
             bd_po.delete();
             bd_osp.delete();
-            
+
             //TODO indexers do not support delete-UnsupportedOperation Exception will be thrown
 //            for (AccumuloIndex index : secondaryIndexers) {
 //                index.dropGraph(graphs);
 //            }
-            
+
         } catch (Exception e) {
             throw new RyaDAOException(e);
         } finally {
@@ -251,7 +255,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             if (bd_po != null) bd_po.close();
             if (bd_osp != null) bd_osp.close();
         }
-        
+
     }
 
     protected void deleteSingleRyaStatement(RyaStatement stmt) throws TripleRowResolverException, MutationsRejectedException {
@@ -281,7 +285,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
             //TODO: Should have a lock here in case we are adding and committing at the same time
             while (commitStatements.hasNext()) {
                 RyaStatement stmt = commitStatements.next();
-                
+
                 Map<TABLE_LAYOUT, Collection<Mutation>> mutationMap = ryaTableMutationsFactory.serialize(stmt);
                 Collection<Mutation> spo = mutationMap.get(TABLE_LAYOUT.SPO);
                 Collection<Mutation> po = mutationMap.get(TABLE_LAYOUT.PO);
@@ -289,7 +293,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                 bw_spo.addMutations(spo);
                 bw_po.addMutations(po);
                 bw_osp.addMutations(osp);
-                
+
                 for (AccumuloIndexer index : secondaryIndexers) {
                     index.storeStatement(stmt);
                 }
@@ -433,11 +437,13 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     	return mt_bw;
     }
 
-    public AccumuloRdfConfiguration getConf() {
+    @Override
+	public AccumuloRdfConfiguration getConf() {
         return conf;
     }
 
-    public void setConf(AccumuloRdfConfiguration conf) {
+    @Override
+	public void setConf(AccumuloRdfConfiguration conf) {
         this.conf = conf;
     }
 
@@ -449,7 +455,8 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
         this.ryaTableMutationsFactory = ryaTableMutationsFactory;
     }
 
-    public AccumuloRyaQueryEngine getQueryEngine() {
+    @Override
+	public AccumuloRyaQueryEngine getQueryEngine() {
         return queryEngine;
     }
 
@@ -460,13 +467,13 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     protected String[] getTables() {
         // core tables
         List<String> tableNames = Lists.newArrayList(
-                tableLayoutStrategy.getSpo(), 
-                tableLayoutStrategy.getPo(), 
-                tableLayoutStrategy.getOsp(), 
+                tableLayoutStrategy.getSpo(),
+                tableLayoutStrategy.getPo(),
+                tableLayoutStrategy.getOsp(),
                 tableLayoutStrategy.getNs(),
                 tableLayoutStrategy.getEval());
-        
-        // Additional Tables        
+
+        // Additional Tables
         for (AccumuloIndexer index : secondaryIndexers) {
             tableNames.add(index.getTableName());
         }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
index b8b3f65..1e2b18a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/entity/EntityCentricIndex.java
@@ -30,19 +30,6 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaType;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-import mvm.rya.api.resolver.RyaTypeResolverException;
-import mvm.rya.api.resolver.triple.TripleRow;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.freetext.AccumuloFreeTextIndexer;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
@@ -56,23 +43,21 @@ import org.apache.accumulo.core.security.ColumnVisibility;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
-import org.openrdf.model.Statement;
-import org.openrdf.query.algebra.evaluation.QueryOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.BindingAssigner;
-import org.openrdf.query.algebra.evaluation.impl.CompareOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.ConjunctiveConstraintSplitter;
-import org.openrdf.query.algebra.evaluation.impl.ConstantOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.DisjunctiveConstraintOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.FilterOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.IterativeEvaluationOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.OrderLimitOptimizer;
-import org.openrdf.query.algebra.evaluation.impl.QueryModelNormalizer;
-import org.openrdf.query.algebra.evaluation.impl.SameTermFilterOptimizer;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.primitives.Bytes;
 
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.domain.RyaType;
+import mvm.rya.api.domain.RyaURI;
+import mvm.rya.api.resolver.RyaContext;
+import mvm.rya.api.resolver.RyaTypeResolverException;
+import mvm.rya.api.resolver.triple.TripleRow;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
 public class EntityCentricIndex extends AbstractAccumuloIndexer {
 
     private static final Logger logger = Logger.getLogger(EntityCentricIndex.class);
@@ -81,23 +66,23 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
     private AccumuloRdfConfiguration conf;
     private BatchWriter writer;
     private boolean isInit = false;
-    
+
     public static final String CONF_TABLE_SUFFIX = "ac.indexer.eci.tablename";
 
-    
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, IOException,
             TableExistsException {
         ConfigUtils.createTableIfNotExists(conf, ConfigUtils.getEntityTableName(conf));
     }
-    
-    
-    @Override 
+
+
+    @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
   //initialization occurs in setConf because index is created using reflection
-    @Override 
+    @Override
     public void setConf(Configuration conf) {
         if (conf instanceof AccumuloRdfConfiguration) {
             this.conf = (AccumuloRdfConfiguration) conf;
@@ -126,7 +111,7 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
             }
         }
     }
-    
+
 
     @Override
     public String getTableName() {
@@ -147,7 +132,8 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
 
     }
 
-   
+
+    @Override
     public void storeStatement(RyaStatement stmt) throws IOException {
         Preconditions.checkNotNull(writer, "BatchWriter not Set");
         try {
@@ -161,7 +147,8 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         }
     }
 
-    
+
+    @Override
     public void deleteStatement(RyaStatement stmt) throws IOException {
         Preconditions.checkNotNull(writer, "BatchWriter not Set");
         try {
@@ -185,10 +172,13 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         byte[] columnQualifier = tripleRow.getColumnQualifier();
         Text cqText = columnQualifier == null ? EMPTY_TEXT : new Text(columnQualifier);
 
-        m.putDelete(cfText, cqText, new ColumnVisibility(tripleRow.getColumnVisibility()), tripleRow.getTimestamp());
+        byte[] columnVisibility = tripleRow.getColumnVisibility();
+        ColumnVisibility cv = columnVisibility == null ? EMPTY_CV : new ColumnVisibility(columnVisibility);
+
+        m.putDelete(cfText, cqText, cv, tripleRow.getTimestamp());
         return m;
     }
-    
+
     public static Collection<Mutation> createMutations(RyaStatement stmt) throws RyaTypeResolverException{
         Collection<Mutation> m = Lists.newArrayList();
         for (TripleRow tr : serializeStatement(stmt)){

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
index f529569..fdefbea 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexer.java
@@ -22,7 +22,6 @@ package mvm.rya.indexing.accumulo.freetext;
 
 
 import static mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils.getNodeIterator;
-import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
@@ -35,25 +34,6 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.FreeTextIndexer;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.Md5Hash;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator;
-import mvm.rya.indexing.accumulo.freetext.query.ASTExpression;
-import mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils;
-import mvm.rya.indexing.accumulo.freetext.query.ASTSimpleNode;
-import mvm.rya.indexing.accumulo.freetext.query.ASTTerm;
-import mvm.rya.indexing.accumulo.freetext.query.ParseException;
-import mvm.rya.indexing.accumulo.freetext.query.QueryParser;
-import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants;
-import mvm.rya.indexing.accumulo.freetext.query.SimpleNode;
-import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchWriter;
@@ -82,6 +62,26 @@ import org.openrdf.query.QueryEvaluationException;
 
 import com.google.common.base.Charsets;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.FreeTextIndexer;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.Md5Hash;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+import mvm.rya.indexing.accumulo.freetext.iterators.BooleanTreeIterator;
+import mvm.rya.indexing.accumulo.freetext.query.ASTExpression;
+import mvm.rya.indexing.accumulo.freetext.query.ASTNodeUtils;
+import mvm.rya.indexing.accumulo.freetext.query.ASTSimpleNode;
+import mvm.rya.indexing.accumulo.freetext.query.ASTTerm;
+import mvm.rya.indexing.accumulo.freetext.query.ParseException;
+import mvm.rya.indexing.accumulo.freetext.query.QueryParser;
+import mvm.rya.indexing.accumulo.freetext.query.QueryParserTreeConstants;
+import mvm.rya.indexing.accumulo.freetext.query.SimpleNode;
+import mvm.rya.indexing.accumulo.freetext.query.TokenMgrError;
+
 /**
  * The {@link AccumuloFreeTextIndexer} stores and queries "free text" data from statements into tables in Accumulo. Specifically, this class
  * stores data into two different Accumulo Tables. This is the <b>document table</b> (default name: triplestore_text) and the <b>terms
@@ -92,27 +92,27 @@ import com.google.common.base.Charsets;
  * <p>
  * For each document, the document table will store the following information:
  * <P>
- * 
+ *
  * <pre>
- * Row (partition) | Column Family  | Column Qualifier | Value 
+ * Row (partition) | Column Family  | Column Qualifier | Value
  * ================+================+==================+==========
- * shardID         | d\x00          | documentHash     | Document 
- * shardID         | s\x00Subject   | documentHash     | (empty) 
- * shardID         | p\x00Predicate | documentHash     | (empty) 
- * shardID         | o\x00Object    | documentHash     | (empty) 
- * shardID         | c\x00Context   | documentHash     | (empty) 
+ * shardID         | d\x00          | documentHash     | Document
+ * shardID         | s\x00Subject   | documentHash     | (empty)
+ * shardID         | p\x00Predicate | documentHash     | (empty)
+ * shardID         | o\x00Object    | documentHash     | (empty)
+ * shardID         | c\x00Context   | documentHash     | (empty)
  * shardID         | t\x00token     | documentHash     | (empty)
  * </pre>
  * <p>
  * Note: documentHash is a sha256 Hash of the Document's Content
  * <p>
- * The terms table is used for expanding wildcard search terms. For each token in the document table, the table sill store the following
+ * The terms table is used for expanding wildcard search terms. For each token in the document table, the table will store the following
  * information:
- * 
+ *
  * <pre>
- * Row (partition)   | CF/CQ/Value 
+ * Row (partition)   | CF/CQ/Value
  * ==================+=============
- * l\x00token        | (empty) 
+ * l\x00token        | (empty)
  * r\x00Reversetoken | (empty)
  * </pre>
  * <p>
@@ -121,7 +121,7 @@ import com.google.common.base.Charsets;
  * into car, bar, and far.
  * <p>
  * Example: Given these three statements as inputs:
- * 
+ *
  * <pre>
  *     <uri:paul> rdfs:label "paul smith"@en <uri:graph1>
  *     <uri:steve> rdfs:label "steven anthony miller"@en <uri:graph1>
@@ -131,9 +131,9 @@ import com.google.common.base.Charsets;
  * Here's what the tables would look like: (Note: the hashes aren't real, the rows are not sorted, and the partition ids will vary.)
  * <p>
  * Triplestore_text
- * 
+ *
  * <pre>
- * Row (partition) | Column Family                   | Column Qualifier | Value 
+ * Row (partition) | Column Family                   | Column Qualifier | Value
  * ================+=================================+==================+==========
  * 000000          | d\x00                           | 08b3d233a        | uri:graph1x00uri:paul\x00rdfs:label\x00"paul smith"@en
  * 000000          | s\x00uri:paul                   | 08b3d233a        | (empty)
@@ -142,7 +142,7 @@ import com.google.common.base.Charsets;
  * 000000          | c\x00uri:graph1                 | 08b3d233a        | (empty)
  * 000000          | t\x00paul                       | 08b3d233a        | (empty)
  * 000000          | t\x00smith                      | 08b3d233a        | (empty)
- * 
+ *
  * 000000          | d\x00                           | 3a575534b        | uri:graph1x00uri:steve\x00rdfs:label\x00"steven anthony miller"@en
  * 000000          | s\x00uri:steve                  | 3a575534b        | (empty)
  * 000000          | p\x00rdfs:label                 | 3a575534b        | (empty)
@@ -151,7 +151,7 @@ import com.google.common.base.Charsets;
  * 000000          | t\x00steven                     | 3a575534b        | (empty)
  * 000000          | t\x00anthony                    | 3a575534b        | (empty)
  * 000000          | t\x00miller                     | 3a575534b        | (empty)
- * 
+ *
  * 000001          | d\x00                           | 7bf670d06        | uri:graph1x00uri:steve\x00rdfs:label\x00"steve miller"@en
  * 000001          | s\x00uri:steve                  | 7bf670d06        | (empty)
  * 000001          | p\x00rdfs:label                 | 7bf670d06        | (empty)
@@ -163,9 +163,9 @@ import com.google.common.base.Charsets;
  * <p>
  * triplestore_terms
  * <p>
- * 
+ *
  * <pre>
- * Row (partition)   | CF/CQ/Value 
+ * Row (partition)   | CF/CQ/Value
  * ==================+=============
  * l\x00paul         | (empty)
  * l\x00smith        | (empty)
@@ -179,12 +179,14 @@ import com.google.common.base.Charsets;
  * r\x00ynohtna      | (empty)
  * r\x00rellim       | (empty)
  * r\x00evets        | (empty)
- * 
+ *
  * <pre>
  */
 public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements FreeTextIndexer  {
     private static final Logger logger = Logger.getLogger(AccumuloFreeTextIndexer.class);
 
+    private static final boolean IS_TERM_TABLE_TOKEN_DELETION_ENABLED = true;
+
     private static final byte[] EMPTY_BYTES = new byte[] {};
     private static final Text EMPTY_TEXT = new Text(EMPTY_BYTES);
     private static final Value EMPTY_VALUE = new Value(EMPTY_BYTES);
@@ -202,10 +204,10 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     private Set<URI> validPredicates;
 
     private Configuration conf;
-    
+
     private boolean isInit = false;
 
-    
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
             TableExistsException {
         String doctable = ConfigUtils.getFreeTextDocTablename(conf);
@@ -262,8 +264,8 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
 
         queryTermLimit = ConfigUtils.getFreeTextTermLimit(conf);
     }
-    
-    
+
+
   //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -272,27 +274,18 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
             try {
                 init();
                 isInit = true;
-            } catch (AccumuloException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (AccumuloSecurityException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableNotFoundException e) {
-                logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
-                throw new RuntimeException(e);
-            } catch (TableExistsException e) {
+            } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException | TableExistsException e) {
                 logger.warn("Unable to initialize index.  Throwing Runtime Exception. ", e);
                 throw new RuntimeException(e);
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
 
     private void storeStatement(Statement statement) throws IOException {
         // if the predicate list is empty, accept all predicates.
@@ -363,6 +356,12 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
         return m;
     }
 
+    private static Mutation createEmptyPutDeleteMutation(Text row) {
+        Mutation m = new Mutation(row);
+        m.putDelete(EMPTY_TEXT, EMPTY_TEXT);
+        return m;
+    }
+
     private static Text genPartition(int partition, int numParitions) {
         int length = Integer.toString(numParitions).length();
         return new Text(String.format("%0" + length + "d", Math.abs(partition % numParitions)));
@@ -471,13 +470,7 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     private Scanner getScanner(String tablename) throws IOException {
         try {
             return ConfigUtils.createScanner(tablename, conf);
-        } catch (AccumuloException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (AccumuloSecurityException e) {
-            logger.error("Error connecting to " + tablename);
-            throw new IOException(e);
-        } catch (TableNotFoundException e) {
+        } catch (AccumuloException | AccumuloSecurityException | TableNotFoundException e) {
             logger.error("Error connecting to " + tablename);
             throw new IOException(e);
         }
@@ -574,7 +567,9 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
 
             @Override
             public void close() throws QueryEvaluationException {
-                s.close();
+                if (s != null) {
+                    s.close();
+                }
             }
         };
     }
@@ -582,7 +577,7 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
     /**
      * Simple adapter that parses the query using {@link QueryParser}. Note: any checked exceptions thrown by {@link QueryParser} are
      * re-thrown as {@link IOException}s.
-     * 
+     *
      * @param query
      * @return
      * @throws IOException
@@ -600,12 +595,121 @@ public class AccumuloFreeTextIndexer extends AbstractAccumuloIndexer implements
         }
         return root;
     }
-    
-   
+
+
     @Override
     public String getTableName() {
        return ConfigUtils.getFreeTextDocTablename(conf);
     }
 
-    
+    private void deleteStatement(Statement statement) throws IOException {
+        // if the predicate list is empty, accept all predicates.
+        // Otherwise, make sure the predicate is on the "valid" list
+        boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+
+        if (isValidPredicate && (statement.getObject() instanceof Literal)) {
+
+            // Get the tokens
+            String text = statement.getObject().stringValue().toLowerCase();
+            SortedSet<String> tokens = tokenizer.tokenize(text);
+
+            if (!tokens.isEmpty()) {
+                // Get Document Data
+                String docContent = StatementSerializer.writeStatement(statement);
+
+                String docId = Md5Hash.md5Base64(docContent);
+
+                // Setup partition
+                Text partition = genPartition(docContent.hashCode(), docTableNumPartitions);
+
+                Mutation docTableMut = new Mutation(partition);
+                List<Mutation> termTableMutations = new ArrayList<Mutation>();
+
+                Text docIdText = new Text(docId);
+
+                // Delete the Document Data
+                docTableMut.putDelete(ColumnPrefixes.DOCS_CF_PREFIX, docIdText);
+
+                // Delete the statement parts in index
+                docTableMut.putDelete(ColumnPrefixes.getSubjColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getPredColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getObjColFam(statement), docIdText);
+                docTableMut.putDelete(ColumnPrefixes.getContextColFam(statement), docIdText);
+
+
+                // Delete the statement terms in index
+                for (String token : tokens) {
+                    if (IS_TERM_TABLE_TOKEN_DELETION_ENABLED) {
+                        int rowId = Integer.parseInt(partition.toString());
+                        boolean doesTermExistInOtherDocs = doesTermExistInOtherDocs(token, rowId, docIdText);
+                        // Only delete the term from the term table if it doesn't appear in other docs
+                        if (!doesTermExistInOtherDocs) {
+                            // Delete the term in the term table
+                            termTableMutations.add(createEmptyPutDeleteMutation(ColumnPrefixes.getTermListColFam(token)));
+                            termTableMutations.add(createEmptyPutDeleteMutation(ColumnPrefixes.getRevTermListColFam(token)));
+                        }
+                    }
+
+                    // Un-tie the token to the document
+                    docTableMut.putDelete(ColumnPrefixes.getTermColFam(token), docIdText);
+                }
+
+                // write the mutations
+                try {
+                    docTableBw.addMutation(docTableMut);
+                    termTableBw.addMutations(termTableMutations);
+                } catch (MutationsRejectedException e) {
+                    logger.error("error adding mutation", e);
+                    throw new IOException(e);
+                }
+
+            }
+        }
+    }
+
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IOException {
+        deleteStatement(RyaToRdfConversions.convertStatement(statement));
+    }
+
+    /**
+     * Checks to see if the provided term appears in other documents.
+     * @param term the term to search for.
+     * @param currentDocId the partition (row) ID of the document being deleted; rows with a different ID are the "other" documents checked.
+     * @return {@code true} if the term was found in other documents. {@code false} otherwise.
+     */
+    private boolean doesTermExistInOtherDocs(String term, int currentDocId, Text docIdText) {
+        try {
+            String freeTextDocTableName = ConfigUtils.getFreeTextDocTablename(conf);
+            Scanner scanner = getScanner(freeTextDocTableName);
+
+            String t = StringUtils.removeEnd(term, "*").toLowerCase();
+            Text queryTerm = ColumnPrefixes.getTermColFam(t);
+
+            // perform query and read results
+            scanner.fetchColumnFamily(queryTerm);
+
+            for (Entry<Key, Value> entry : scanner) {
+                Key key = entry.getKey();
+                Text row = key.getRow();
+                int rowId = Integer.parseInt(row.toString());
+                // We only want to check other documents from the one we're deleting
+                if (rowId != currentDocId) {
+                    Text columnFamily = key.getColumnFamily();
+                    String columnFamilyValue = columnFamily.toString();
+                    // Check that the value has the term prefix
+                    if (columnFamilyValue.startsWith(ColumnPrefixes.TERM_CF_PREFIX.toString())) {
+                        Text text = ColumnPrefixes.removePrefix(columnFamily);
+                        String value = text.toString();
+                        if (value.equals(term)) {
+                            return true;
+                        }
+                    }
+                }
+            }
+        } catch (IOException e) {
+            logger.error("Error searching for the existence of the term in other documents", e);
+        }
+        return false;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
index 37acf89..c8b5b4a 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
@@ -1,4 +1,4 @@
-package mvm.rya.indexing.accumulo.geo;
+package mvm.rya.indexing.accumulo.geo;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -21,8 +21,6 @@ package mvm.rya.indexing.accumulo.geo;
 
 
 
-import info.aduna.iteration.CloseableIteration;
-
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -30,37 +28,23 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.GeoIndexer;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.Md5Hash;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.MultiTableBatchWriter;
-import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
 import org.geotools.data.DataStore;
 import org.geotools.data.DataStoreFinder;
+import org.geotools.data.DataUtilities;
 import org.geotools.data.FeatureSource;
 import org.geotools.data.FeatureStore;
 import org.geotools.data.Query;
+import org.geotools.factory.CommonFactoryFinder;
 import org.geotools.factory.Hints;
 import org.geotools.feature.DefaultFeatureCollection;
 import org.geotools.feature.FeatureIterator;
@@ -68,21 +52,33 @@ import org.geotools.feature.SchemaException;
 import org.geotools.feature.simple.SimpleFeatureBuilder;
 import org.geotools.filter.text.cql2.CQLException;
 import org.geotools.filter.text.ecql.ECQL;
+import org.locationtech.geomesa.accumulo.data.AccumuloDataStore;
 import org.locationtech.geomesa.accumulo.index.Constants;
 import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes;
 import org.opengis.feature.simple.SimpleFeature;
 import org.opengis.feature.simple.SimpleFeatureType;
 import org.opengis.filter.Filter;
+import org.opengis.filter.FilterFactory;
+import org.opengis.filter.identity.Identifier;
 import org.openrdf.model.Literal;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
 import org.openrdf.query.QueryEvaluationException;
 
-import com.google.common.base.Preconditions;
 import com.vividsolutions.jts.geom.Geometry;
 import com.vividsolutions.jts.io.ParseException;
 import com.vividsolutions.jts.io.WKTReader;
 
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.GeoIndexer;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.Md5Hash;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+
 /**
  * A {@link GeoIndexer} wrapper around a GeoMesa {@link AccumuloDataStore}. This class configures and connects to the Datastore, creates the
  * RDF Feature Type, and interacts with the Datastore.
@@ -129,7 +125,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     private static final Logger logger = Logger.getLogger(GeoMesaGeoIndexer.class);
 
     private static final String FEATURE_NAME = "RDF";
-  
+
     private static final String SUBJECT_ATTRIBUTE = "S";
     private static final String PREDICATE_ATTRIBUTE = "P";
     private static final String OBJECT_ATTRIBUTE = "O";
@@ -141,7 +137,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     private FeatureSource<SimpleFeatureType, SimpleFeature> featureSource;
     private SimpleFeatureType featureType;
     private boolean isInit = false;
-   
+
     //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -156,18 +152,18 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
+
 
     private void init() throws IOException {
         validPredicates = ConfigUtils.getGeoPredicates(conf);
 
         DataStore dataStore = createDataStore(conf);
-        
+
         try {
             featureType = getStatementFeatureType(dataStore);
         } catch (IOException e) {
@@ -235,7 +231,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
         // create a feature collection
         DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
 
-        
+
         for (RyaStatement ryaStatement : ryaStatements) {
 
             Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
@@ -264,7 +260,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
     public void storeStatement(RyaStatement statement) throws IOException {
         storeStatements(Collections.singleton(statement));
     }
-    
+
     private static SimpleFeature createFeature(SimpleFeatureType featureType, Statement statement) throws ParseException {
         String subject = StatementSerializer.writeSubject(statement);
         String predicate = StatementSerializer.writePredicate(statement);
@@ -358,7 +354,7 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
 
             @Override
             public Statement next() throws QueryEvaluationException {
-                SimpleFeature feature = (SimpleFeature) getIterator().next();
+                SimpleFeature feature = getIterator().next();
                 String subjectString = feature.getAttribute(SUBJECT_ATTRIBUTE).toString();
                 String predicateString = feature.getAttribute(PREDICATE_ATTRIBUTE).toString();
                 String objectString = feature.getAttribute(OBJECT_ATTRIBUTE).toString();
@@ -440,8 +436,42 @@ public class GeoMesaGeoIndexer extends AbstractAccumuloIndexer implements GeoInd
        return ConfigUtils.getGeoTablename(conf);
     }
 
+    private void deleteStatements(Collection<RyaStatement> ryaStatements) throws IOException {
+        // create a feature collection
+        DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
+
+        for (RyaStatement ryaStatement : ryaStatements) {
+            Statement statement = RyaToRdfConversions.convertStatement(ryaStatement);
+            // if the predicate list is empty, accept all predicates.
+            // Otherwise, make sure the predicate is on the "valid" list
+            boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+
+            if (isValidPredicate && (statement.getObject() instanceof Literal)) {
+                try {
+                    SimpleFeature feature = createFeature(featureType, statement);
+                    featureCollection.add(feature);
+                } catch (ParseException e) {
+                    logger.warn("Error getting geo from statement: " + statement.toString(), e);
+                }
+            }
+        }
 
+        // remove this feature collection from the store
+        if (!featureCollection.isEmpty()) {
+            Set<Identifier> featureIds = new HashSet<Identifier>();
+            FilterFactory filterFactory = CommonFactoryFinder.getFilterFactory(null);
+            Set<String> stringIds = DataUtilities.fidSet(featureCollection);
+            for (String id : stringIds) {
+                featureIds.add(filterFactory.featureId(id));
+            }
+            Filter filter = filterFactory.id(featureIds);
+            featureStore.removeFeatures(filter);
+        }
+    }
 
-  
 
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IOException {
+        deleteStatements(Collections.singleton(statement));
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
index e2f98b3..095f18f 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/temporal/AccumuloTemporalIndexer.java
@@ -20,8 +20,6 @@ package mvm.rya.indexing.accumulo.temporal;
  */
 
 
-import info.aduna.iteration.CloseableIteration;
-
 import java.io.IOException;
 import java.nio.charset.CharacterCodingException;
 import java.util.Collection;
@@ -36,21 +34,6 @@ import java.util.regex.Pattern;
 
 import javax.xml.datatype.XMLGregorianCalendar;
 
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
-import mvm.rya.accumulo.experimental.AccumuloIndexer;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.resolver.RyaToRdfConversions;
-import mvm.rya.indexing.KeyParts;
-import mvm.rya.indexing.StatementContraints;
-import mvm.rya.indexing.TemporalIndexer;
-import mvm.rya.indexing.TemporalInstant;
-import mvm.rya.indexing.TemporalInterval;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.StatementSerializer;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -77,13 +60,24 @@ import org.openrdf.model.URI;
 import org.openrdf.query.QueryEvaluationException;
 
 import cern.colt.Arrays;
+import info.aduna.iteration.CloseableIteration;
+import mvm.rya.accumulo.experimental.AbstractAccumuloIndexer;
+import mvm.rya.api.domain.RyaStatement;
+import mvm.rya.api.resolver.RyaToRdfConversions;
+import mvm.rya.indexing.KeyParts;
+import mvm.rya.indexing.StatementContraints;
+import mvm.rya.indexing.TemporalIndexer;
+import mvm.rya.indexing.TemporalInstant;
+import mvm.rya.indexing.TemporalInterval;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.StatementSerializer;
 
 public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements TemporalIndexer {
 
 	private static final Logger logger = Logger.getLogger(AccumuloTemporalIndexer.class);
 
     private static final String CF_INTERVAL = "interval";
- 
+
 
 
     // Delimiter used in the interval stored in the triple's object literal.
@@ -99,11 +93,11 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
     private Set<URI> validPredicates;
     private String temporalIndexTableName;
-    
+
     private boolean isInit = false;
 
-    
-    
+
+
     private void init() throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
             TableExistsException {
         temporalIndexTableName = ConfigUtils.getTemporalTableName(conf);
@@ -116,7 +110,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
         validPredicates = ConfigUtils.getTemporalPredicates(conf);
     }
-    
+
     //initialization occurs in setConf because index is created using reflection
     @Override
     public void setConf(Configuration conf) {
@@ -140,13 +134,13 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             }
         }
     }
-    
+
     @Override
     public Configuration getConf() {
         return this.conf;
     }
-    
-    
+
+
     /**
      * Store a statement in the index if it meets the criterion: Object should be
      * a literal and one of the validPredicates from the configuration.
@@ -180,18 +174,18 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             throw new IOException("While adding interval/instant for statement =" + statement, e);
         }
     }
-    
-    
+
+
     @Override
     public void storeStatement(RyaStatement statement) throws IllegalArgumentException, IOException {
         storeStatement(RyaToRdfConversions.convertStatement(statement));
     }
-    
-    
+
+
 
     /**
      * parse the literal dates from the object of a statement.
-     * 
+     *
      * @param statement
      * @param outputDateTimes
      */
@@ -209,7 +203,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	        	outputDateTimes[1] = new DateTime(matcher.group(2));
 	        	return;
     		} catch (java.lang.IllegalArgumentException e) {
-                logThis = e.getMessage() + " " + logThis;	
+                logThis = e.getMessage() + " " + logThis;
                 outputDateTimes[0]=null;
                 outputDateTimes[1]=null;
     		}
@@ -221,7 +215,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			outputDateTimes[1] = null;
 			return;
 		} catch (java.lang.IllegalArgumentException e) {
-            logThis = e.getMessage();			
+            logThis = e.getMessage();
 		}
 		// Try again using Joda Time DateTime.parse()
 		try {
@@ -230,13 +224,58 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			//System.out.println(">>>>>>>Joda parsed: "+literalValue.stringValue());
 			return;
 		} catch (java.lang.IllegalArgumentException e) {
-            logThis = e.getMessage() + " " + logThis;			
+            logThis = e.getMessage() + " " + logThis;
 		}
-        logger.warn("TemporalIndexer is unable to parse the date/time from statement="  + statement.toString() + " " +logThis);			
+        logger.warn("TemporalIndexer is unable to parse the date/time from statement="  + statement.toString() + " " +logThis);
 		return;
     }
 
     /**
+     * Remove an interval index
+     * TODO: integrate into KeyParts (or eliminate)
+     * @param writer
+     * @param cv
+     * @param interval
+     * @throws MutationsRejectedException
+     */
+    public void removeInterval(BatchWriter writer, TemporalInterval interval, Statement statement) throws MutationsRejectedException {
+        Text cf = new Text(StatementSerializer.writeContext(statement));
+        Text cqBegin = new Text(KeyParts.CQ_BEGIN);
+        Text cqEnd = new Text(KeyParts.CQ_END);
+
+        // Start Begin index
+        Text keyText = new Text(interval.getAsKeyBeginning());
+        KeyParts.appendUniqueness(statement, keyText);
+        Mutation m = new Mutation(keyText);
+        m.putDelete(cf, cqBegin);
+        writer.addMutation(m);
+
+        // now the end index:
+        keyText = new Text(interval.getAsKeyEnd());
+        KeyParts.appendUniqueness(statement, keyText);
+        m = new Mutation(keyText);
+        m.putDelete(cf, cqEnd);
+        writer.addMutation(m);
+    }
+
+    /**
+     * Remove an instant index
+     *
+     * @param writer
+     * @param instant
+     * @param statement
+     * @throws MutationsRejectedException
+     */
+    public void removeInstant(BatchWriter writer, TemporalInstant instant, Statement statement) throws MutationsRejectedException {
+        KeyParts keyParts = new KeyParts(statement, instant);
+        for (KeyParts  k: keyParts) {
+            Mutation m = new Mutation(k.getStoreKey());
+            m.putDelete(k.cf, k.cq);
+            writer.addMutation(m);
+        }
+    }
+
+    /**
      * Index a new interval
      * TODO: integrate into KeyParts (or eliminate)
      * @param writer
@@ -250,9 +289,9 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         Text cf = new Text(StatementSerializer.writeContext(statement));
         Text cqBegin = new Text(KeyParts.CQ_BEGIN);
         Text cqEnd = new Text(KeyParts.CQ_END);
-        
+
         // Start Begin index
-        Text keyText =new Text(interval.getAsKeyBeginning());
+        Text keyText = new Text(interval.getAsKeyBeginning());
         KeyParts.appendUniqueness(statement, keyText);
         Mutation m = new Mutation(keyText);
         m.put(cf, cqBegin, statementValue);
@@ -270,29 +309,29 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
 
     /**
-     * Index a new interval
-     * Make indexes that handle this expression:  
-     *     hash( s? p? ) ?o 
+     * Index a new instant
+     * Make indexes that handle this expression:
+     *     hash( s? p? ) ?o
      *         == o union hash(s)o union hash(p)o  union hash(sp)o
-     * 
+     *
      * @param writer
      * @param cv
      * @param instant
      * @throws MutationsRejectedException
      */
     public void addInstant(BatchWriter writer, TemporalInstant instant, Statement statement) throws MutationsRejectedException {
-    	KeyParts keyParts = new KeyParts(statement, instant);
-    	for (KeyParts k: keyParts) { 
-			Mutation m = new Mutation(k.getStoreKey());
-			m.put(k.cf, k.cq,k.getValue());
-			writer.addMutation(m);
-    	}
+        KeyParts keyParts = new KeyParts(statement, instant);
+        for (KeyParts k : keyParts) {
+            Mutation m = new Mutation(k.getStoreKey());
+            m.put(k.cf, k.cq,k.getValue());
+            writer.addMutation(m);
+        }
     }
 
 
     /**
      * creates a scanner and handles all the throwables and nulls.
-     * 
+     *
      * @param scanner
      * @return
      * @throws IOException
@@ -364,10 +403,10 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 			@Override
 			public Range getRange(KeyParts keyParts) {
 		    	Text start= null;
-				if (keyParts.constraintPrefix != null )  // Yes, has constraints 
+				if (keyParts.constraintPrefix != null )  // Yes, has constraints
 					start = keyParts.constraintPrefix;   // <-- start specific logic
 				else
-					start = new Text(KeyParts.HASH_PREFIX_FOLLOWING);  
+					start = new Text(KeyParts.HASH_PREFIX_FOLLOWING);
 				Text endAt = keyParts.getQueryKey();			       // <-- end specific logic
 				//System.out.println("Scanning queryInstantBeforeInstant: from:" + KeyParts.toHumanString(start) + " up to:" + KeyParts.toHumanString(endAt));
 				return new Range(start, true, endAt, false);
@@ -376,7 +415,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 		ScannerBase scanner = query.doQuery(queryInstant, constraints);
 		return getContextIteratorWrapper(scanner, constraints.getContext());
     }
-    
+
     /**
      * get statements where the date object is after the given queryInstant.
      */
@@ -464,7 +503,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
     /**
      * Get intervals stored in the repository matching the given interval.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject constraints are filtered on the client.
      */
     @Override
@@ -492,7 +531,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	/**
 	 * find intervals stored in the repository before the given Interval. Find interval endings that are
 	 * before the given beginning.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject constraints are filtered on the client.
 	 */
 	@Override
@@ -515,20 +554,20 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	/**
 	 * Interval after given interval.  Find intervals that begin after the endings of the given interval.
 	 * Use the special following prefix mechanism to avoid matching the beginning date.
-     * Indexing Intervals  will probably change or be removed.  
+     * Indexing Intervals  will probably change or be removed.
      * Currently predicate and subject and context constraints are filtered on the client.
 	 */
 	@Override
 	public CloseableIteration<Statement, QueryEvaluationException> queryIntervalAfter(
 	        TemporalInterval queryInterval, StatementContraints constraints)
 	        throws QueryEvaluationException {
-	
+
 	    Scanner scanner = getScanner();
 	    if (scanner != null) {
 	        // get rows where the start date is greater than the queryInterval.getEnd()
 	        Range range = new Range(new Key(Range.followingPrefix(new Text(queryInterval.getHasEnd().getAsKeyBytes()))), false, null, true);
 	        scanner.setRange(range);
-	        
+
 	        if (constraints.hasContext())
 	        	scanner.fetchColumn(new Text(constraints.getContext().toString()), new Text(KeyParts.CQ_BEGIN));
 	        else
@@ -540,14 +579,14 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 	// --
 	// -- END of Query functions.  Next up, general stuff used by the queries above.
 	// --
-	
+
 	/**
 	 * Allows passing range specific logic into doQuery.
 	 * Each query function implements an anonymous instance of this and calls it's doQuery().
 	 */
 	abstract class Query {
 		abstract protected Range getRange(KeyParts keyParts);
-	
+
 		public ScannerBase doQuery(TemporalInstant queryInstant, StatementContraints constraints) throws QueryEvaluationException {
 			// key is contraintPrefix + time, or just time.
 			// Any constraints handled here, if the constraints are empty, the
@@ -558,7 +597,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 				scanner = getBatchScanner();
 			else
 				scanner = getScanner();
-	
+
 			Collection<Range> ranges = new HashSet<Range>();
 			KeyParts lastKeyParts = null;
 			Range range = null;
@@ -579,7 +618,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
 
 	/**
      * An iteration wrapper for a loaded scanner that is returned for each query above.
-     * 
+     *
      * @param scanner
      *            the results to iterate, then close.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
@@ -623,14 +662,14 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         };
     }
 
-    
+
     /**
      * An iteration wrapper for a loaded scanner that is returned for partially supported interval queries above.
-     * 
+     *
      * @param scanner  the results to iterate, then close.
      * @param constraints  limit statements returned by next() to those matching the constraints.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
-     * @throws QueryEvaluationException 
+     * @throws QueryEvaluationException
      */
 	private static CloseableIteration<Statement, QueryEvaluationException> getConstrainedIteratorWrapper(final Scanner scanner, final StatementContraints constraints) {
 		if (!constraints.hasContext() && !constraints.hasSubject() && !constraints.hasPredicates())
@@ -645,11 +684,11 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
     /**
      * An iteration wrapper for a loaded scanner that is returned for queries above.
      * Currently, this temporal index supports contexts only on the client, using this filter.
-     * 
+     *
      * @param scanner  the results to iterate, then close.
      * @param constraints  limit statements returned by next() to those matching the constraints.
      * @return an anonymous object that will iterate the resulting statements from a given scanner.
-     * @throws QueryEvaluationException 
+     * @throws QueryEvaluationException
      */
 	private static CloseableIteration<Statement, QueryEvaluationException> getContextIteratorWrapper(final ScannerBase scanner, final Resource context) {
 		if (context==null)
@@ -671,7 +710,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         	private boolean isInitialized = false;
         	final private Iterator<Entry<Key, Value>> i;
         	final private ScannerBase scanner;
-        	
+
         	ConstrainedIteratorWrapper(ScannerBase scanner) {
         		this.scanner = scanner;
         		i=scanner.iterator();
@@ -698,7 +737,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             }
 
 			/**
-			 * Gets the next statement meeting constraints and stores in nextStatement.  
+			 * Gets the next statement meeting constraints and stores in nextStatement.
 			 * Sets null when all done, or on exception.
 			 * @throws QueryEvaluationException
 			 */
@@ -727,7 +766,7 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             	}
 			}
 			public abstract boolean allowedBy(Statement s);
-			
+
             @Override
             public void remove() {
                 throw new UnsupportedOperationException("Remove not implemented");
@@ -751,15 +790,15 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
         	{System.out.println("Constrain subject: "+constraints.getSubject()+" != " + statement.getSubject()); return false;}
     		//return false;
 
-    	if (! allowedByContext(statement, constraints.getContext())) 
+    	if (! allowedByContext(statement, constraints.getContext()))
 		    return false;
     	    //{System.out.println("Constrain context: "+constraints.getContext()+" != " + statement.getContext()); return false;}
-	
+
     	if (constraints.hasPredicates() && ! constraints.getPredicates().contains(statement.getPredicate()))
     		return false;
     	    //{System.out.println("Constrain predicate: "+constraints.getPredicates()+" != " + statement.getPredicate()); return false;}
-    	
-    	System.out.println("allow statement: "+ statement.toString()); 
+
+    	System.out.println("allow statement: "+ statement.toString());
 		return true;
 	}
 
@@ -812,13 +851,42 @@ public class AccumuloTemporalIndexer extends AbstractAccumuloIndexer implements
             throw new IOException(msg, e);
         }
     }
-    
-    
+
+
 
     @Override
     public String getTableName() {
        return ConfigUtils.getTemporalTableName(conf);
     }
 
-    
+    private void deleteStatement(Statement statement) throws IOException, IllegalArgumentException {
+        // if the predicate list is empty, accept all predicates.
+        // Otherwise, make sure the predicate is on the "valid" list
+        boolean isValidPredicate = validPredicates.isEmpty() || validPredicates.contains(statement.getPredicate());
+        if (!isValidPredicate || !(statement.getObject() instanceof Literal))
+            return;
+        DateTime[] indexDateTimes = new DateTime[2]; // 0 begin, 1 end of interval
+        extractDateTime(statement, indexDateTimes);
+        if (indexDateTimes[0] == null) {
+            return;
+        }
+
+        // Remove this as an instant, or interval.
+        try {
+            if (indexDateTimes[1] != null) {
+                TemporalInterval interval = new TemporalInterval(new TemporalInstantRfc3339(indexDateTimes[0]), new TemporalInstantRfc3339(indexDateTimes[1]));
+                removeInterval(temporalIndexBatchWriter, interval, statement);
+            } else {
+                TemporalInstant instant = new TemporalInstantRfc3339(indexDateTimes[0]);
+                removeInstant(temporalIndexBatchWriter, instant, statement);
+            }
+        } catch (MutationsRejectedException e) {
+            throw new IOException("While deleting interval/instant for statement =" + statement, e);
+        }
+    }
+
+    @Override
+    public void deleteStatement(RyaStatement statement) throws IllegalArgumentException, IOException {
+        deleteStatement(RyaToRdfConversions.convertStatement(statement));
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/e5e227c1/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
index a0a3a03..c6bd9c2 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/freetext/AccumuloFreeTextIndexerTest.java
@@ -1,5 +1,30 @@
 package mvm.rya.indexing.accumulo.freetext;
 
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.admin.TableOperations;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.RDFS;
+
+import com.google.common.collect.Sets;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -22,11 +47,6 @@ package mvm.rya.indexing.accumulo.freetext;
 
 
 import info.aduna.iteration.CloseableIteration;
-
-import java.util.HashSet;
-import java.util.Map.Entry;
-import java.util.Set;
-
 import junit.framework.Assert;
 import mvm.rya.api.domain.RyaStatement;
 import mvm.rya.api.domain.RyaType;
@@ -36,27 +56,6 @@ import mvm.rya.api.resolver.RyaToRdfConversions;
 import mvm.rya.indexing.StatementContraints;
 import mvm.rya.indexing.accumulo.ConfigUtils;
 
-import org.apache.accumulo.core.Constants;
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.admin.TableOperations;
-import org.apache.accumulo.core.data.Key;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Before;
-import org.junit.Test;
-import org.openrdf.model.Statement;
-import org.openrdf.model.URI;
-import org.openrdf.model.Value;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.impl.ValueFactoryImpl;
-import org.openrdf.model.vocabulary.RDFS;
-
-import com.google.common.collect.Sets;
-
 public class AccumuloFreeTextIndexerTest {
     private static final StatementContraints EMPTY_CONSTRAINTS = new StatementContraints();
 
@@ -90,107 +89,153 @@ public class AccumuloFreeTextIndexerTest {
 
     @Test
     public void testSearch() throws Exception {
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+
+            URI subject = new URIImpl("foo:subj");
+            URI predicate = RDFS.LABEL;
+            Value object = vf.createLiteral("this is a new hat");
+
+            URI context = new URIImpl("foo:context");
 
-        ValueFactory vf = new ValueFactoryImpl();
+            Statement statement = vf.createStatement(subject, predicate, object, context);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement));
+            f.flush();
 
-        URI subject = new URIImpl("foo:subj");
-        URI predicate = RDFS.LABEL;
-        Value object = vf.createLiteral("this is a new hat");
+            printTables(conf);
 
-        URI context = new URIImpl("foo:context");
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("asdf", EMPTY_CONSTRAINTS)));
 
-        Statement statement = vf.createStatement(subject, predicate, object, context);
-        f.storeStatement(RdfToRyaConversions.convertStatement(statement));
-        f.flush();
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & !is", EMPTY_CONSTRAINTS)));
 
-        printTables(conf);
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("is", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("a", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("asdf", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("ha*", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("*at", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & !is", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat & new", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("is", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("a", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this & hat & new", EMPTY_CONSTRAINTS)));
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("ha*", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("*at", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("bat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & bat", EMPTY_CONSTRAINTS)));
+        }
+    }
+
+    @Test
+    public void testDelete() throws Exception {
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+
+            URI subject1 = new URIImpl("foo:subj");
+            URI predicate1 = RDFS.LABEL;
+            Value object1 = vf.createLiteral("this is a new hat");
+
+            URI context1 = new URIImpl("foo:context");
+
+            Statement statement1 = vf.createStatement(subject1, predicate1, object1, context1);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement1));
+
+            URI subject2 = new URIImpl("foo:subject");
+            URI predicate2 = RDFS.LABEL;
+            Value object2 = vf.createLiteral("Do you like my new hat?");
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat & new", EMPTY_CONSTRAINTS)));
+            URI context2 = new URIImpl("foo:context");
 
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("this & hat & new", EMPTY_CONSTRAINTS)));
+            Statement statement2 = vf.createStatement(subject2, predicate2, object2, context2);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement2));
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("bat", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this & bat", EMPTY_CONSTRAINTS)));
+            f.flush();
 
-        f.close();
+
+            System.out.println("testDelete: BEFORE DELETE");
+            printTables(conf);
+
+            f.deleteStatement(RdfToRyaConversions.convertStatement(statement1));
+            System.out.println("testDelete: AFTER FIRST DELETION");
+            printTables(conf);
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
+
+            // Check that "new" didn't get deleted from the term table after "this is a new hat"
+            // was deleted since "new" is still in "Do you like my new hat?"
+            Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
+
+            f.deleteStatement(RdfToRyaConversions.convertStatement(statement2));
+            System.out.println("testDelete: AFTER LAST DELETION");
+            printTables(conf);
+
+            System.out.println("testDelete: DONE");
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
+        }
     }
 
     @Test
     public void testRestrictPredicatesSearch() throws Exception {
         conf.setStrings(ConfigUtils.FREETEXT_PREDICATES_LIST, "pred:1,pred:2");
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
 
-        // These should not be stored because they are not in the predicate list
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj1"), new RyaURI(RDFS.LABEL.toString()), new RyaType("invalid")));
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj2"), new RyaURI(RDFS.COMMENT.toString()), new RyaType("invalid")));
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
 
-        RyaURI pred1 = new RyaURI("pred:1");
-        RyaURI pred2 = new RyaURI("pred:2");
+            // These should not be stored because they are not in the predicate list
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj1"), new RyaURI(RDFS.LABEL.toString()), new RyaType("invalid")));
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj2"), new RyaURI(RDFS.COMMENT.toString()), new RyaType("invalid")));
 
-        // These should be stored because they are in the predicate list
-        RyaStatement s3 = new RyaStatement(new RyaURI("foo:subj3"), pred1, new RyaType("valid"));
-        RyaStatement s4 = new RyaStatement(new RyaURI("foo:subj4"), pred2, new RyaType("valid"));
-        f.storeStatement(s3);
-        f.storeStatement(s4);
+            RyaURI pred1 = new RyaURI("pred:1");
+            RyaURI pred2 = new RyaURI("pred:2");
 
-        // This should not be stored because the object is not a literal
-        f.storeStatement(new RyaStatement(new RyaURI("foo:subj5"), pred1, new RyaURI("in:valid")));
+            // These should be stored because they are in the predicate list
+            RyaStatement s3 = new RyaStatement(new RyaURI("foo:subj3"), pred1, new RyaType("valid"));
+            RyaStatement s4 = new RyaStatement(new RyaURI("foo:subj4"), pred2, new RyaType("valid"));
+            f.storeStatement(s3);
+            f.storeStatement(s4);
 
-        f.flush();
+            // This should not be stored because the object is not a literal
+            f.storeStatement(new RyaStatement(new RyaURI("foo:subj5"), pred1, new RyaURI("in:valid")));
 
-        printTables(conf);
+            f.flush();
 
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("invalid", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("in:valid", EMPTY_CONSTRAINTS)));
+            printTables(conf);
 
-        Set<Statement> actual = getSet(f.queryText("valid", EMPTY_CONSTRAINTS));
-        Assert.assertEquals(2, actual.size());
-        Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s3)));
-        Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s4)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("invalid", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("in:valid", EMPTY_CONSTRAINTS)));
 
-        f.close();
+            Set<Statement> actual = getSet(f.queryText("valid", EMPTY_CONSTRAINTS));
+            Assert.assertEquals(2, actual.size());
+            Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s3)));
+            Assert.assertTrue(actual.contains(RyaToRdfConversions.convertStatement(s4)));
+        }
     }
 
     @Test
     public void testContextSearch() throws Exception {
-        
-        AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer();
-        f.setConf(conf);
-
-        ValueFactory vf = new ValueFactoryImpl();
-        URI subject = new URIImpl("foo:subj");
-        URI predicate = new URIImpl(RDFS.COMMENT.toString());
-        Value object = vf.createLiteral("this is a new hat");
-        URI context = new URIImpl("foo:context");
-
-        Statement statement = vf.createStatement(subject, predicate, object, context);
-        f.storeStatement(RdfToRyaConversions.convertStatement(statement));
-        f.flush();
-
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
-        Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementContraints().setContext(context))));
-        Assert.assertEquals(Sets.newHashSet(),
-                getSet(f.queryText("hat", new StatementContraints().setContext(vf.createURI("foo:context2")))));
-
-        f.close();
+        try (AccumuloFreeTextIndexer f = new AccumuloFreeTextIndexer()) {
+            f.setConf(conf);
+
+            ValueFactory vf = new ValueFactoryImpl();
+            URI subject = new URIImpl("foo:subj");
+            URI predicate = new URIImpl(RDFS.COMMENT.toString());
+            Value object = vf.createLiteral("this is a new hat");
+            URI context = new URIImpl("foo:context");
+
+            Statement statement = vf.createStatement(subject, predicate, object, context);
+            f.storeStatement(RdfToRyaConversions.convertStatement(statement));
+            f.flush();
+
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", EMPTY_CONSTRAINTS)));
+            Assert.assertEquals(Sets.newHashSet(statement), getSet(f.queryText("hat", new StatementContraints().setContext(context))));
+            Assert.assertEquals(Sets.newHashSet(),
+                    getSet(f.queryText("hat", new StatementContraints().setContext(vf.createURI("foo:context2")))));
+        }
     }
 
     public static void printTables(Configuration conf) throws AccumuloException, AccumuloSecurityException, TableNotFoundException {
@@ -201,7 +246,7 @@ public class AccumuloFreeTextIndexerTest {
         for (String table : tops.list()) {
             System.out.println("Reading : " + table);
             System.out.format(FORMAT, "--Row--", "--ColumnFamily--", "--ColumnQualifier--", "--Value--");
-            Scanner s = ConfigUtils.getConnector(conf).createScanner(table, Constants.NO_AUTHS);
+            Scanner s = ConfigUtils.getConnector(conf).createScanner(table, Authorizations.EMPTY);
             for (Entry<Key, org.apache.accumulo.core.data.Value> entry : s) {
                 Key k = entry.getKey();
                 System.out.format(FORMAT, k.getRow(), k.getColumnFamily(), k.getColumnQualifier(), entry.getValue());


[3/3] incubator-rya git commit: RYA-14 Updated to Java 1.7; Minor geo issue

Posted by mi...@apache.org.
RYA-14 Updated to Java 1.7; Minor geo issue

Also fixed a minor geo issue with RYA-13 commit.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/990f1ffe
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/990f1ffe
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/990f1ffe

Branch: refs/heads/develop
Commit: 990f1ffe272fe13b99367528fb8bb661f8e29f45
Parents: e5e227c
Author: Aaron Mihalik <mi...@alum.mit.edu>
Authored: Fri Dec 4 18:08:53 2015 -0500
Committer: Aaron Mihalik <mi...@alum.mit.edu>
Committed: Fri Dec 4 18:08:53 2015 -0500

----------------------------------------------------------------------
 .../mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java  |  2 +-
 .../mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java     |  3 ++-
 pom.xml                                                   | 10 ++--------
 3 files changed, 5 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
index c8b5b4a..9d01751 100644
--- a/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/accumulo/geo/GeoMesaGeoIndexer.java
@@ -1,4 +1,4 @@
-package mvm.rya.indexing.accumulo.geo;
+package mvm.rya.indexing.accumulo.geo;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
index c204f3c..25db123 100644
--- a/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/accumulo/geo/GeoIndexerTest.java
@@ -237,7 +237,8 @@ public class GeoIndexerTest {
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pOut, EMPTY_CONSTRAINTS)));
 
             // test a ring for the whole world and make sure the point is gone
-            double[] world = { -180, 90, 180, 90, -180, 90, -180, -90, -180, 90 };
+            // Geomesa is a little sensitive around lon 180, so we only go to 179
+            double[] world = { -180, 90, 179, 90, 179, -90, -180, -90, -180, 90 };
             LinearRing rWorld = gf.createLinearRing(new PackedCoordinateSequence.Double(world, 2));
             Polygon pWorld = gf.createPolygon(rWorld, new LinearRing[] {});
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryWithin(pWorld, EMPTY_CONSTRAINTS)));

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/990f1ffe/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5f2164f..dce799f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -500,6 +500,8 @@ under the License.
                     <artifactId>maven-compiler-plugin</artifactId>
                     <configuration>
                         <encoding>${project.build.sourceEncoding}</encoding>
+                        <source>1.7</source>
+                        <target>1.7</target>
                     </configuration>
                 </plugin>
                 <plugin>
@@ -649,14 +651,6 @@ under the License.
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-enforcer-plugin</artifactId>
                 <executions>
                     <execution>