Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/07 13:04:42 UTC

[12/51] [partial] incubator-rya git commit: Cannot delete temp branch, doc'd it.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
deleted file mode 100644
index b3e8dae..0000000
--- a/extras/indexingExample/src/main/java/RyaDirectExample.java
+++ /dev/null
@@ -1,700 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import java.util.List;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.indexing.accumulo.geo.GeoConstants;
-import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.MutationsRejectedException;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.model.URI;
-import org.openrdf.model.ValueFactory;
-import org.openrdf.model.impl.LiteralImpl;
-import org.openrdf.model.impl.URIImpl;
-import org.openrdf.model.vocabulary.RDF;
-import org.openrdf.model.vocabulary.RDFS;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-import org.openrdf.sail.SailException;
-
-public class RyaDirectExample {
-    private static final Logger log = Logger.getLogger(RyaDirectExample.class);
-
-    //
-    // Connection configuration parameters
-    //
-
-    private static final boolean USE_MOCK_INSTANCE = true;
-    private static final boolean PRINT_QUERIES = true;
-    private static final String INSTANCE = "instance";
-    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
-    private static final String AUTHS = "";
-    
-    
-    
-    public static void main(String[] args) throws Exception {
-        Configuration conf = getConf();
-        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-        
-        log.info("Creating the tables as root.");
-//        createTables(addRootConf(conf), conf);
-
-        SailRepository repository = null;
-        SailRepositoryConnection conn = null;
-      
-        try {
-            log.info("Connecting to Indexing Sail Repository.");
-            
-            Sail extSail = RyaSailFactory.getInstance(conf);
-            repository = new SailRepository(extSail);
-            repository.initialize();
-            conn = repository.getConnection();
-            
-            createPCJ(conn);
-
-            long start = System.currentTimeMillis();
-            log.info("Running SPARQL Example: Add and Delete");
-            testAddAndDelete(conn);
-            log.info("Running SAIL/SPARQL Example: PCJ Search");
-            testPCJSearch(conn);
-            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
-            testAddAndTemporalSearchWithPCJ(conn);
-            log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
-            testAddAndFreeTextSearchWithPCJ(conn);
-            log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
-            testAddPointAndWithinSearchWithPCJ(conn);
-            log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
-            testTemporalFreeGeoSearch(conn);
-            log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
-            testGeoFreetextWithPCJSearch(conn);
-
-            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
-        } finally {
-            log.info("Shutting down");
-            closeQuietly(conn);
-            closeQuietly(repository);
-        }
-    }
-
-    private static void closeQuietly(SailRepository repository) {
-        if (repository != null) {
-            try {
-                repository.shutDown();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static void closeQuietly(SailRepositoryConnection conn) {
-        if (conn != null) {
-            try {
-                conn.close();
-            } catch (RepositoryException e) {
-                // quietly absorb this exception
-            }
-        }
-    }
-
-    private static Configuration getConf() {
-
-        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
-        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
-        conf.set(ConfigUtils.USE_PCJ, "true");
-        conf.set(ConfigUtils.USE_GEO, "true");
-        conf.set(ConfigUtils.USE_FREETEXT, "true");
-        conf.set(ConfigUtils.USE_TEMPORAL, "true");
-        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
-        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
-        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
-        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
-        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
-        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
-
-        // only geo index statements with geo:asWKT predicates
-        conf.set(ConfigUtils.GEO_PREDICATES_LIST, GeoConstants.GEO_AS_WKT.stringValue());
-        return conf;
-    }
-
-    public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
-            RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
-            AccumuloException, AccumuloSecurityException, TableNotFoundException {
-
-        // Add data
-        String query = "INSERT DATA\n"//
-                + "{ GRAPH <http://updated/test> {\n"//
-                + "  <http://acme.com/people/Mike> " //
-                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
-                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
-        log.info("Performing Query");
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-
-        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-        CountingResultHandler resultHandler = new CountingResultHandler();
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(resultHandler);
-        log.info("Result count : " + resultHandler.getCount());
-
-        Validate.isTrue(resultHandler.getCount() == 2);
-        resultHandler.resetCount();
-
-        // Delete Data
-        query = "DELETE DATA\n" //
-                + "{ GRAPH <http://updated/test> {\n"
-                + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-
-        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-        update.execute();
-
-        query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-        tupleQuery.evaluate(resultHandler);
-        log.info("Result count : " + resultHandler.getCount());
-
-        Validate.isTrue(resultHandler.getCount() == 0);
-    }
-    
-    
-    private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-     // ///////////// search for bob
-        queryString = "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-           
-     // ///////////// search for bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?c a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);    
-        
-    }
-    
-
-    
-    
-    private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
-        // create some resources and literals to make statements out of
-
-        String sparqlInsert = "PREFIX time: <http://www.w3.org/2006/time#>\n"
-                + "INSERT DATA {\n" //
-                + "_:eventz       a       time:Instant ;\n"
-                + "     time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" //  one second
-                + "     time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" //   2 seconds
-                + "     time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" //   3 seconds
-                + "     time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" //   4 seconds
-                + "     time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"   
-                + "     time:inXSDDateTime '2006-01-01' ;\n" 
-                + "     time:inXSDDateTime '2007-01-01' ;\n" 
-                + "     time:inXSDDateTime '2008-01-01' ; .\n"
-                + "}";
-
-        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
-        update.execute();
-
-        // Find all stored dates.
-        String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time \n" //
-                + "WHERE { \n"
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-       
-        
-
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        CountingResultHandler tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-        
-        // Find all stored dates.
-        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time \n" //
-                + "WHERE { \n"
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  ?event a  time:Instant . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-
-
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-
-
-        // Find all stored dates.
-        queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
-                + "SELECT ?event ?time ?e ?c ?l ?o \n" //
-                + "WHERE { \n"
-                + "  ?e a ?c . \n"//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . \n"//
-                + "  ?e <uri:talksTo> ?o . \n"//
-                + "  ?event a  time:Instant . \n"//
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5);
-    }
-
-
-
-
-
-
-    private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-        // add data to the repository using the SailRepository add methods
-        ValueFactory f = conn.getValueFactory();
-        URI person = f.createURI("http://example.org/ontology/Person");
-
-        String uuid;
-
-        uuid = "urn:people:alice";
-        conn.add(f.createURI(uuid), RDF.TYPE, person);
-        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
-
-        uuid = "urn:people:bobss";
-        conn.add(f.createURI(uuid), RDF.TYPE, person);
-        conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-        // ///////////// search for alice
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-
-        // ///////////// search for alice and bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match " //
-                + "{" //
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                  + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 2);
-        
-     // ///////////// search for alice and bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match " //
-                + "{" //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-        
-        // ///////////// search for bob
-        queryString = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?person ?match ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
-                + "}";//
-
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-
-
-
-    private static void testAddPointAndWithinSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
-        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "INSERT DATA { " //
-                + "  <urn:feature> a geo:Feature ; " //
-                + "    geo:hasGeometry [ " //
-                + "      a geo:Point ; " //
-                + "      geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
-                + "    ] . " //
-                + "}";
-
-        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
-        u.execute();
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-        
-        // point outside search ring
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt " //
-                + "{" //
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 0);
-        
-        // point inside search ring
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-         
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-        
-             
-        // point inside search ring with Pre-Computed Join
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-         
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from previous runs
-
-        // point outside search ring with PCJ
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?l ?o " //
-                + "{" //
-                + "  ?feature a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 0);
-        
-        // point inside search ring with different Pre-Computed Join
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-    
-    
-    private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
-    RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
-        
-        
-        String queryString;
-        TupleQuery tupleQuery;
-        CountingResultHandler tupleHandler;
-
-        // ring containing point
-        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "PREFIX time: <http://www.w3.org/2006/time#> "//
-                + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
-                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
-                + "{" //
-                + "  ?event a  time:Instant . \n"//
-                + "  ?event time:inXSDDateTime ?time . \n"//
-                + "  FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"pal*\")) " //
-                + "}";//
-        
-        
-        
-        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
-        tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 5); 
-        
-    }
-    
-    
-    
-    private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException, 
-    RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
-     // ring outside point
-        String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-                + "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-                + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-                + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " //
-                + "{" //
-                + "  ?person a <http://example.org/ontology/Person> . "//
-                + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
-                + "  FILTER(fts:text(?match, \"!alice & hose\")) " //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "  ?feature a geo:Feature . "//
-                + "  ?feature geo:hasGeometry ?point . "//
-                + "  ?point a geo:Point . "//
-                + "  ?point geo:asWKT ?wkt . "//
-                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
-                + "}";//
-        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-        CountingResultHandler tupleHandler = new CountingResultHandler();
-        tupleQuery.evaluate(tupleHandler);
-        log.info("Result count : " + tupleHandler.getCount());
-        Validate.isTrue(tupleHandler.getCount() == 1);
-    }
-    
-    
-    
-    private static void createPCJ(SailRepositoryConnection conn) 
-            throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
-        
-        String queryString1 = ""//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?c a ?e . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-        
-        String queryString2 = ""//
-                + "SELECT ?e ?c ?l ?o " //
-                + "{" //
-                + "  ?e a ?c . "//
-                + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-                + "  ?e <uri:talksTo> ?o . "//
-                + "}";//
-        
-        
-        URI obj,subclass,talksTo;
-        URI person = new URIImpl("urn:people:alice");
-        URI feature = new URIImpl("urn:feature");
-        URI sub = new URIImpl("uri:entity");
-        subclass = new URIImpl("uri:class");
-        obj = new URIImpl("uri:obj");
-        talksTo = new URIImpl("uri:talksTo");
-
-        conn.add(person, RDF.TYPE, sub);
-        conn.add(feature, RDF.TYPE, sub);
-        conn.add(sub, RDF.TYPE, subclass);
-        conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
-        conn.add(sub, talksTo, obj);
-       
-        AccumuloIndexSet ais1 = null; 
-        AccumuloIndexSet ais2 = null; 
-        String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
-        String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
-
-        Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
-        accCon.tableOperations().create(tablename1);
-        accCon.tableOperations().create(tablename2);
-        
-        try {
-            ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
-            ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
-        } catch (MalformedQueryException e) {
-            e.printStackTrace();
-        } catch (SailException e) {
-            e.printStackTrace();
-        } catch (QueryEvaluationException e) {
-            e.printStackTrace();
-        } catch (MutationsRejectedException e) {
-            e.printStackTrace();
-        } catch (TableNotFoundException e) {
-            e.printStackTrace();
-        }
-        
-    }
-    
-
-    private static class CountingResultHandler implements TupleQueryResultHandler {
-        private int count = 0;
-
-        public int getCount() {
-            return count;
-        }
-
-        public void resetCount() {
-            this.count = 0;
-        }
-
-        @Override
-        public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
-            count++;
-            System.out.println(arg0);
-        }
-
-        @Override
-        public void endQueryResult() throws TupleQueryResultHandlerException {
-        }
-
-        @Override
-        public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-
-        @Override
-        public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
-          // TODO Auto-generated method stub
-          
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
deleted file mode 100644
index a89e3d1..0000000
--- a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
+++ /dev/null
@@ -1,41 +0,0 @@
-@echo off
-rem Licensed to the Apache Software Foundation (ASF) under one
-rem or more contributor license agreements.  See the NOTICE file
-rem distributed with this work for additional information
-rem regarding copyright ownership.  The ASF licenses this file
-rem to you under the Apache License, Version 2.0 (the
-rem "License"); you may not use this file except in compliance
-rem with the License.  You may obtain a copy of the License at
-rem 
-rem   http://www.apache.org/licenses/LICENSE-2.0
-rem 
-rem Unless required by applicable law or agreed to in writing,
-rem software distributed under the License is distributed on an
-rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-rem KIND, either express or implied.  See the License for the
-rem specific language governing permissions and limitations
-rem under the License.
-SET CP=
-
-REM Check to see if javac is on the path
-where /Q javac
-IF %ERRORLEVEL% NEQ 0 goto :NO_JAVAC
-
-
-for /f %%f in ('DIR /b .\lib\*.jar') do call :append .\lib\%%f
-
-javac -cp "%CP%" RyaDirectExample.java
-java -cp "%CP%" RyaDirectExample
-
-goto :end
-
-:append
-@echo off
-SET CP=%CP%%1;
-goto :end
-
-:NO_JAVAC
-echo ERROR: Could not find javac
-goto :end
-
-:end

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/pom.xml
----------------------------------------------------------------------
diff --git a/extras/pom.xml b/extras/pom.xml
deleted file mode 100644
index f3a88d3..0000000
--- a/extras/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya-project</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>rya.extras</artifactId>
-    <name>Apache Rya Extra Projects</name>
-
-    <packaging>pom</packaging>
-
-    <modules>
-        <module>rya.prospector</module>
-        <module>rya.manual</module>
-        <module>tinkerpop.rya</module>
-        <module>rya.console</module>
-        <module>indexing</module>
-        <module>indexingExample</module>
-    </modules>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/.gitignore
----------------------------------------------------------------------
diff --git a/extras/rya.console/.gitignore b/extras/rya.console/.gitignore
deleted file mode 100644
index 5d1172a..0000000
--- a/extras/rya.console/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-/.classpath
-/.project
-.settings/
-target/
-/log.roo
-*.log
-
-/bin/

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.console/pom.xml b/extras/rya.console/pom.xml
deleted file mode 100644
index 1bbb5a0..0000000
--- a/extras/rya.console/pom.xml
+++ /dev/null
@@ -1,100 +0,0 @@
-<?xml version='1.0'?>
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>rya.console</artifactId>
-    <name>Apache Rya Console</name>
-
-    <properties>
-        <jar.mainclass>org.springframework.shell.Bootstrap</jar.mainclass>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.rya</groupId>
-            <artifactId>rya.api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.rya</groupId>
-            <artifactId>accumulo.rya</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>jline</groupId>
-                    <artifactId>jline</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.springframework.shell</groupId>
-            <artifactId>spring-shell</artifactId>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-dependency-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy-dependencies</id>
-                        <phase>prepare-package</phase>
-                        <goals>
-                            <goal>copy-dependencies</goal>
-                        </goals>
-                        <configuration>
-                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
-                            <overWriteReleases>true</overWriteReleases>
-                            <overWriteSnapshots>true</overWriteSnapshots>
-                            <overWriteIfNewer>true</overWriteIfNewer>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifest>
-                            <addClasspath>true</addClasspath>
-                            <!--<useUniqueVersions>false</useUniqueVersions> -->
-                            <classpathPrefix>lib/</classpathPrefix>
-                            <mainClass>${jar.mainclass}</mainClass>
-                        </manifest>
-                        <manifestEntries>
-                            <version>${project.version}</version>
-                        </manifestEntries>
-                    </archive>
-                </configuration>
-            </plugin>
-        </plugins>
-
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
deleted file mode 100644
index 2d0fac8..0000000
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaBannerProvider.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package mvm.rya.console;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.springframework.core.Ordered;
-import org.springframework.core.annotation.Order;
-import org.springframework.shell.core.CommandMarker;
-import org.springframework.shell.core.annotation.CliCommand;
-import org.springframework.shell.plugin.support.DefaultBannerProvider;
-import org.springframework.shell.support.util.OsUtils;
-import org.springframework.stereotype.Component;
-
-/**
- * @author Jarred Li
- */
-@Component
-@Order(Ordered.HIGHEST_PRECEDENCE)
-public class RyaBannerProvider extends DefaultBannerProvider
-        implements CommandMarker {
-
-    @CliCommand(value = {"version"}, help = "Displays current CLI version")
-    @Override
-    public String getBanner() {
-        StringBuffer buf = new StringBuffer();
-        buf.append("" +
-                "________                    _________                         ______     \n" +
-                "___  __ \\____  _______ _    __  ____/____________________________  /____ \n" +
-                "__  /_/ /_  / / /  __ `/    _  /    _  __ \\_  __ \\_  ___/  __ \\_  /_  _ \\\n" +
-                "_  _, _/_  /_/ // /_/ /     / /___  / /_/ /  / / /(__  )/ /_/ /  / /  __/\n" +
-                "/_/ |_| _\\__, / \\__,_/      \\____/  \\____//_/ /_//____/ \\____//_/  \\___/ \n" +
-                "        /____/ " + OsUtils.LINE_SEPARATOR);
-        buf.append("Version:" + this.getVersion());
-        return buf.toString();
-
-    }
-
-    @Override
-    public String getVersion() {
-        return "3.0.0";
-    }
-
-    @Override
-    public String getWelcomeMessage() {
-        return "Welcome to the Rya Console";
-    }
-
-    @Override
-    public String getProviderName() {
-        return "rya";
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
deleted file mode 100644
index 3f63b20..0000000
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaConsoleCommands.java
+++ /dev/null
@@ -1,230 +0,0 @@
-package mvm.rya.console;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import info.aduna.iteration.CloseableIteration;
-
-import java.io.FileInputStream;
-import java.io.StringReader;
-import java.util.Formatter;
-import java.util.Locale;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.accumulo.AccumuloRyaDAO;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.api.domain.RyaStatement;
-import mvm.rya.api.domain.RyaURI;
-import mvm.rya.api.persist.RyaDAO;
-import mvm.rya.api.persist.RyaDAOException;
-import mvm.rya.api.persist.query.RyaQueryEngine;
-import mvm.rya.api.resolver.RdfToRyaConversions;
-import mvm.rya.api.resolver.RyaContext;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.openrdf.model.Statement;
-import org.openrdf.rio.RDFHandler;
-import org.openrdf.rio.RDFHandlerException;
-import org.openrdf.rio.RDFParser;
-import org.openrdf.rio.ntriples.NTriplesParserFactory;
-import org.springframework.shell.core.CommandMarker;
-import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
-import org.springframework.shell.core.annotation.CliCommand;
-import org.springframework.shell.core.annotation.CliOption;
-import org.springframework.stereotype.Component;
-
-@Component
-public class RyaConsoleCommands implements CommandMarker {
-
-    private static final NTriplesParserFactory N_TRIPLES_PARSER_FACTORY = new NTriplesParserFactory();
-
-    protected final Logger LOG = Logger.getLogger(getClass().getName());
-
-    private RyaContext ryaContext = RyaContext.getInstance();
-    private RyaDAO ryaDAO;
-    private RDFParser ntrips_parser = null;
-
-    public RyaConsoleCommands() {
-        ntrips_parser = N_TRIPLES_PARSER_FACTORY.getParser();
-        ntrips_parser.setRDFHandler(new RDFHandler() {
-
-            public void startRDF() throws RDFHandlerException {
-
-            }
-
-            public void endRDF() throws RDFHandlerException {
-
-            }
-
-            public void handleNamespace(String s, String s1) throws RDFHandlerException {
-
-            }
-
-            public void handleStatement(Statement statement) throws RDFHandlerException {
-                try {
-                    RyaStatement ryaStatement = RdfToRyaConversions.convertStatement(statement);
-                    ryaDAO.add(ryaStatement);
-                } catch (Exception e) {
-                    throw new RDFHandlerException(e);
-                }
-            }
-
-            public void handleComment(String s) throws RDFHandlerException {
-
-            }
-        });
-    }
-
-    /**
-     * commands:
-     * 1. connect(instance, user, password, zk)
-     * 1.a. disconnect
-     * 2. query
-     * 3. add
-     */
-
-    @CliAvailabilityIndicator({"connect"})
-    public boolean isConnectAvailable() {
-        return true;
-    }
-
-    @CliAvailabilityIndicator({"qt", "add", "load", "disconnect"})
-    public boolean isCommandAvailable() {
-        return ryaDAO != null;
-    }
-
-    @CliCommand(value = "qt", help = "Query with Triple Pattern")
-    public String queryTriple(
-            @CliOption(key = {"subject"}, mandatory = false, help = "Subject") final String subject,
-            @CliOption(key = {"predicate"}, mandatory = false, help = "Predicate") final String predicate,
-            @CliOption(key = {"object"}, mandatory = false, help = "Object") final String object,
-            @CliOption(key = {"context"}, mandatory = false, help = "Context") final String context,
-            @CliOption(key = {"maxResults"}, mandatory = false, help = "Maximum Number of Results", unspecifiedDefaultValue = "100") final String maxResults
-    ) {
-        try {
-            RdfCloudTripleStoreConfiguration conf = ryaDAO.getConf().clone();
-            if (maxResults != null) {
-                conf.setLimit(Long.parseLong(maxResults));
-            }
-            RyaQueryEngine queryEngine = ryaDAO.getQueryEngine();
-            CloseableIteration<RyaStatement, RyaDAOException> query =
-                    queryEngine.query(new RyaStatement(
-                            (subject != null) ? (new RyaURI(subject)) : null,
-                            (predicate != null) ? (new RyaURI(predicate)) : null,
-                            (object != null) ? (new RyaURI(object)) : null,
-                            (context != null) ? (new RyaURI(context)) : null), conf);
-            StringBuilder sb = new StringBuilder();
-            Formatter formatter = new Formatter(sb, Locale.US);
-            String format = "%-40s %-40s %-40s %-40s\n";
-            formatter.format(format, "Subject", "Predicate",
-                    "Object", "Context");
-            while (query.hasNext()) {
-                RyaStatement next = query.next();
-                formatter.format(format, next.getSubject().getData(), next.getPredicate().getData(),
-                        next.getObject().getData(), (next.getContext() != null) ? (next.getContext().getData()) : (null));
-                sb.append("\n");
-            }
-            return sb.toString();
-        } catch (Exception e) {
-            LOG.log(Level.SEVERE, "", e);
-        }
-        return "";
-    }
-
-    @CliCommand(value = "load", help = "Load file")
-    public void load(
-            @CliOption(key = {"", "file"}, mandatory = true, help = "File of ntriples rdf to load") final String file
-    ) {
-        //diff formats?
-        //diff types of urls
-        try {
-            ntrips_parser.parse(new FileInputStream(file), "");
-        } catch (Exception e) {
-            LOG.log(Level.SEVERE, "", e);
-        }
-    }
-
-    @CliCommand(value = "add", help = "Add Statement")
-    public void add(
-            @CliOption(key = {"", "statement"}, mandatory = true, help = "Statement in NTriples format") final String statement) {
-        try {
-            ntrips_parser.parse(new StringReader(statement), "");
-        } catch (Exception e) {
-            LOG.log(Level.SEVERE, "", e);
-        }
-    }
-
-    @CliCommand(value = "connect", help = "Connect to Rya Triple Store")
-    public String connect(
-            @CliOption(key = {"instance"}, mandatory = true, help = "Accumulo Instance") final String instance,
-            @CliOption(key = {"user"}, mandatory = true, help = "Accumulo User") final String user,
-            @CliOption(key = {"pwd"}, mandatory = true, help = "Accumulo Pwd") final String pwd,
-            @CliOption(key = {"zk"}, mandatory = true, help = "Accumulo Zk (zk=mock for the mock instance)") final String zk,
-            @CliOption(key = {"pre"}, mandatory = false, help = "Accumulo table prefix", unspecifiedDefaultValue = "rya_") final String pre) {
-        try {
-            //using Cloudbase
-            Connector connector = null;
-            AccumuloRyaDAO cryaDao = new AccumuloRyaDAO();
-            if ("mock".equals(zk)) {
-                //mock instance
-                connector = new MockInstance(instance).getConnector(user, pwd);
-            } else {
-                connector = new ZooKeeperInstance(instance, zk).getConnector(user, pwd);
-            }
-
-            cryaDao.setConnector(connector);
-            AccumuloRdfConfiguration configuration = new AccumuloRdfConfiguration();
-            configuration.setTablePrefix(pre);
-            cryaDao.setConf(configuration);
-            cryaDao.init();
-            this.ryaDAO = cryaDao;
-            return "Connected to Accumulo";
-        } catch (Exception e) {
-            LOG.log(Level.SEVERE, "", e);
-        }
-        return "";
-    }
-
-    @CliCommand(value = "disconnect", help = "Disconnect from Rya Store")
-    public String disconnect() {
-        if (ryaDAO == null) {
-            return "Command is not available because Rya is not connected. Please 'connect' first.";
-        }
-        try {
-            this.ryaDAO.destroy();
-            this.ryaDAO = null;
-        } catch (RyaDAOException e) {
-            LOG.log(Level.SEVERE, "", e);
-        }
-        return "";
-    }
-
-    public RyaDAO getRyaDAO() {
-        return ryaDAO;
-    }
-
-    public void setRyaDAO(RyaDAO ryaDAO) {
-        this.ryaDAO = ryaDAO;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
deleted file mode 100644
index 97182aa..0000000
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaHistoryFileNameProvider.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package mvm.rya.console;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.springframework.core.Ordered;
-import org.springframework.core.annotation.Order;
-import org.springframework.shell.plugin.support.DefaultHistoryFileNameProvider;
-import org.springframework.stereotype.Component;
-
-/**
- * 
- * @author Jarred Li
- *
- */
-@Component
-@Order(Ordered.HIGHEST_PRECEDENCE)
-public class RyaHistoryFileNameProvider extends DefaultHistoryFileNameProvider{
-
-    @Override
-	public String getHistoryFileName() {
-		return "ryaconsole.log";
-	}
-
-	@Override
-	public String getProviderName() {
-		return "Rya Console History Log";
-	}
-	
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java b/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
deleted file mode 100644
index b199819..0000000
--- a/extras/rya.console/src/main/java/mvm/rya/console/RyaPromptProvider.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package mvm.rya.console;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-import org.springframework.core.Ordered;
-import org.springframework.core.annotation.Order;
-import org.springframework.shell.plugin.support.DefaultPromptProvider;
-import org.springframework.stereotype.Component;
-
-/**
- * @author Jarred Li
- *
- */
-@Component
-@Order(Ordered.HIGHEST_PRECEDENCE)
-public class RyaPromptProvider extends DefaultPromptProvider {
-
-	@Override
-	public String getPrompt() {
-		return "rya>";
-	}
-
-	
-	@Override
-	public String getProviderName() {
-		return "Rya Console Prompt";
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
----------------------------------------------------------------------
diff --git a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml b/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
deleted file mode 100644
index e593a48..0000000
--- a/extras/rya.console/src/main/resources/META-INF/spring/spring-shell-plugin.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<beans xmlns="http://www.springframework.org/schema/beans"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xmlns:context="http://www.springframework.org/schema/context"
-	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
-		http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.1.xsd">
-
-	<context:component-scan base-package="mvm.rya.console" />
-
-</beans>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/pom.xml
----------------------------------------------------------------------
diff --git a/extras/rya.manual/pom.xml b/extras/rya.manual/pom.xml
deleted file mode 100644
index 75c106a..0000000
--- a/extras/rya.manual/pom.xml
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.rya</groupId>
-        <artifactId>rya.extras</artifactId>
-        <version>3.2.10-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>rya.manual</artifactId>
-    <name>Apache Rya Manual</name>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-site-plugin</artifactId>
-                <dependencies>
-                    <dependency>
-                        <groupId>org.apache.maven.doxia</groupId>
-                        <artifactId>doxia-module-markdown</artifactId>
-                        <version>1.6</version>
-                    </dependency>
-                </dependencies>
-                <configuration>
-                    <inputEncoding>UTF-8</inputEncoding>
-                    <outputEncoding>UTF-8</outputEncoding>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>
-

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/_index.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/_index.md b/extras/rya.manual/src/site/markdown/_index.md
deleted file mode 100644
index bf030a3..0000000
--- a/extras/rya.manual/src/site/markdown/_index.md
+++ /dev/null
@@ -1,44 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-
-# Rya
-- [Overview](overview.md)
-- [Quick Start](quickstart.md)
-- [Load Data](loaddata.md)
-- [Query Data](querydata.md)
-- [Evaluation Table](eval.md)
-- [Pre-computed Joins](loadPrecomputedJoin.md)
-- [Inferencing](infer.md)
-
-# Samples
-- [Typical First Steps](sm-firststeps.md)
-- [Simple Add/Query/Remove Statements](sm-simpleaqr.md)
-- [Sparql query](sm-sparqlquery.md)
-- [Adding Authentication](sm-addauth.md)
-- [Inferencing](sm-infer.md)
-- [Named Graph](sm-namedgraph.md)
-- [Update data](sm-updatedata.md)
-- [Alx](alx.md)
-
-# Development
-- [Building From Source](build-source.md)
-- [LTS Maven Settings XML](maven-settings.md)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/alx.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/alx.md b/extras/rya.manual/src/site/markdown/alx.md
deleted file mode 100644
index 2d0eae7..0000000
--- a/extras/rya.manual/src/site/markdown/alx.md
+++ /dev/null
@@ -1,82 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Alx Rya Integration
-
-Alx is a modular framework for developing applications. Rya has mechanisms to integrate directly into Alx to provide other modules access to queries.
-
-Currently, the Alx Rya extension only allows interacting with an Accumulo store.
-
-## Prerequisites
-
-- Alx 1.0.5+ (we will refer to it as the ALX_HOME directory from now on)
-- alx.rya features xml (can be found in Maven at `mvn:mvm.rya/alx.rya/<version>/xml/features`)
-
-## Steps
-
-1. Start up Alx
-2. features:addurl alx.rya features xml
-3. features:install alx.rya
-4. (optional) features:install alx.rya.console
-
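-For example, from the Alx shell the steps above look roughly like this (a sketch; substitute the version of the alx.rya features xml you are using):
-
-```
-features:addurl mvn:mvm.rya/alx.rya/<version>/xml/features
-features:install alx.rya
-features:install alx.rya.console
-```
-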
-That's it. To verify the installation, run `ls <alx.rya bundle id>` and confirm that output similar to the following appears:
-
-```
-mvm.rya.alx.rya (99) provides:
-------------------------------
-Bundle-SymbolicName = mvm.rya.alx.rya
-Bundle-Version = 3.0.4.SNAPSHOT
-objectClass = org.osgi.service.cm.ManagedService
-service.id = 226
-service.pid = mvm.rya.alx
-----
-...
-```
-
-## Using
-
-The bundle registers a Sail Repository, so you can interact with it directly as in the other code examples. Here is a quick Groovy example of its usage:
-
-``` JAVA
-import org.springframework.osgi.extensions.annotation.*;
-import org.openrdf.repository.*;
-import org.openrdf.model.ValueFactory;
-import static mvm.rya.api.RdfCloudTripleStoreConstants.*;
-
-class TstRepo {
-
-	@ServiceReference
-	public void setRepo(Repository repo) {
-		println repo
-		RepositoryConnection conn = repo.getConnection();
-		ValueFactory vf = VALUE_FACTORY;
-        def statements = conn.getStatements(vf.createURI("http://www.Department0.University0.edu"), null, null, true);
-        while(statements.hasNext()) {
-            System.out.println(statements.next());
-        }
-        statements.close();
-        conn.close();
-	}
-
-}
-```
-
-The bundle also registers a RyaDAO, so you can interact with the RyaDAO interface directly.
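-
-A minimal sketch of grabbing that RyaDAO through the same Spring OSGi mechanism as above (the import/package for `RyaDAO` is an assumption; adjust it to your version):
-
-``` JAVA
-import org.springframework.osgi.extensions.annotation.*;
-import mvm.rya.api.persist.RyaDAO;
-
-class TstDao {
-
-	@ServiceReference
-	public void setRyaDAO(RyaDAO dao) {
-		// simply confirm the DAO service was injected; it is ready to use from here
-		println dao
-	}
-}
-```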

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/build-source.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/build-source.md b/extras/rya.manual/src/site/markdown/build-source.md
deleted file mode 100644
index 07f0cb5..0000000
--- a/extras/rya.manual/src/site/markdown/build-source.md
+++ /dev/null
@@ -1,36 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Building from Source
-
-## Prerequisites
-
-* Rya code
-* Maven 2.2 +
-
-## Building
-
-Using Git, clone the latest code from the repository URL referenced above.
-
-Run `mvn clean install` to build the code.
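-
-For example (a sketch; substitute the actual repository URL):
-
-```
-git clone <rya repository url>
-cd <checkout directory>
-mvn clean install
-```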
-
-If all goes well, here are the artifacts that you will be interested in:
-* Rya-WAR : web/web-rya/target/web.rya.war

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/eval.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/eval.md b/extras/rya.manual/src/site/markdown/eval.md
deleted file mode 100644
index fc4095b..0000000
--- a/extras/rya.manual/src/site/markdown/eval.md
+++ /dev/null
@@ -1,79 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Prospects Table
-
-The Prospects Table provides statistics on the subject/predicate/object data found in the triple store. It is currently built by a
-MapReduce job that runs against the Rya store and saves all of the statistics in the prospects table.
-
-## Build
-
-[Build the mmrts.git repo](build-source.md)
-
-## Run
-
-Deploy the `extras/rya.prospector/target/rya.prospector-<version>-shade.jar` file to the hadoop cluster.
-
-The prospector also requires a configuration file that defines where Accumulo is, which Rya table to read from (it has to be the SPO table), and
-which table to output to. (Note: make sure the output table follows the same naming scheme as the Rya tables; the prospects table name is `tableprefix_prospects`.)
-
-A sample configuration file might look like the following:
-
-``` XML
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<configuration>
-    <property>
-        <name>prospector.intable</name>
-        <value>triplestore_spo</value>
-    </property>
-    <property>
-        <name>prospector.outtable</name>
-        <value>triplestore_prospects</value>
-    </property>
-    <property>
-        <name>prospector.auths</name>
-        <value>U,FOUO</value>
-    </property>
-    <property>
-        <name>instance</name>
-        <value>accumulo</value>
-    </property>
-    <property>
-        <name>zookeepers</name>
-        <value>localhost:2181</value>
-    </property>
-    <property>
-        <name>username</name>
-        <value>root</value>
-    </property>
-    <property>
-        <name>password</name>
-        <value>secret</value>
-    </property>
-</configuration>
-```
-
-Run the command, filling in the correct information.
-
-```
-hadoop jar rya.prospector-3.0.4-SNAPSHOT-shade.jar mvm.rya.prospector.mr.Prospector /tmp/prospectorConf.xml
-```
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/index.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/index.md b/extras/rya.manual/src/site/markdown/index.md
deleted file mode 100644
index 0748284..0000000
--- a/extras/rya.manual/src/site/markdown/index.md
+++ /dev/null
@@ -1,45 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Rya
-
-This project contains documentation about Rya, a scalable RDF triple store built on top of Accumulo.
-
-- [Overview](overview.md)
-- [Quick Start](quickstart.md)
-- [Load Data](loaddata.md)
-- [Query Data](querydata.md)
-- [Evaluation Table](eval.md)
-- [Pre-computed Joins](loadPrecomputedJoin.md)
-- [Inferencing](infer.md)
-
-# Samples
-- [Typical First Steps](sm-firststeps.md)
-- [Simple Add/Query/Remove Statements](sm-simpleaqr.md)
-- [Sparql query](sm-sparqlquery.md)
-- [Adding Authentication](sm-addauth.md)
-- [Inferencing](sm-infer.md)
-- [Named Graph](sm-namedgraph.md)
-- [Update data](sm-updatedata.md)
-- [Alx](alx.md)
-
-# Development
-- [Building From Source](build-source.md)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/infer.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/infer.md b/extras/rya.manual/src/site/markdown/infer.md
deleted file mode 100644
index 35b6f14..0000000
--- a/extras/rya.manual/src/site/markdown/infer.md
+++ /dev/null
@@ -1,35 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Inferencing
-
-The currently supported inferencing set includes:
-
-* rdfs:subClassOf
-* rdfs:subPropertyOf
-* owl:equivalentProperty
-* owl:inverseOf
-* owl:SymmetricProperty
-* owl:TransitiveProperty (implemented, but possibly not fully; still in testing)
-
-Nothing special has to be done beyond making sure that the RdfCloudTripleStore object has the InferencingEngine object set on it and properly configured. This is usually done by default. See the [Query Data Section](querydata.md) for a simple example.
-
-Also, the inferencing engine currently pulls down the latest model every 5 minutes (this interval is configurable), so if you load a new model, an existing RepositoryConnection may not see those changes reflected in the inferencing engine right away. To refresh the inferred graph immediately, get the InferencingEngine object from the RdfCloudTripleStore and call its `refreshGraph` method.
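-
-A minimal sketch of an immediate refresh, assuming you hold a reference to the configured RdfCloudTripleStore (the class and accessor names here are assumptions based on the description above):
-
-``` JAVA
-// store is the RdfCloudTripleStore backing your repository
-InferenceEngine inferenceEngine = store.getInferenceEngine();
-// rebuild the inferred graph now rather than waiting for the next scheduled refresh
-inferenceEngine.refreshGraph();
-```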
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md b/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
deleted file mode 100644
index 220cf03..0000000
--- a/extras/rya.manual/src/site/markdown/loadPrecomputedJoin.md
+++ /dev/null
@@ -1,49 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Load Pre-computed Join
-
-A tool has been created to load a pre-computed join. This tool generates an index to support a pre-computed join on a user-provided SPARQL query, and then registers that query within Rya.
-
-
-## Registering a pre-computed join
-
-Generating a pre-computed join is done using Pig to execute a series of MapReduce jobs. The index (pre-computed join) is associated with a user-defined SPARQL query.
-  
-To execute the indexing tool, compile and run `mvm.rya.accumulo.pig.IndexWritingTool` 
-with the following seven input arguments (an example invocation follows the option list below): `[hdfsSaveLocation] [sparqlFile] [instance] [cbzk] [user] [password] [rdfTablePrefix]`
-
-
-Options:
-
-* hdfsSaveLocation: a working directory on hdfs for storing interim results
-* sparqlFile: the query to generate a precomputed join for
-* instance: the accumulo instance name
-* cbzk: the accumulo zookeeper name
-* user: the accumulo username
-* password:  the accumulo password for the supplied user
-* rdfTablePrefix : The tables (spo, po, osp) are prefixed with this qualifier. The tables become: (rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp
-
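-For example, a hypothetical invocation (the jar is a placeholder for whichever artifact packages the tool; substitute your own values):
-
-```
-hadoop jar <jar containing IndexWritingTool> mvm.rya.accumulo.pig.IndexWritingTool \
-    /tmp/precomputedJoin /tmp/query.sparql accumulo localhost:2181 root secret triplestore_
-```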
-
-## Using a Pre-computed Join
-
-An example of using a pre-computed join can be referenced in 
-`mvm.rya.indexing.external.ExternalSailExample`

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/loaddata.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/loaddata.md b/extras/rya.manual/src/site/markdown/loaddata.md
deleted file mode 100644
index 2c6bc00..0000000
--- a/extras/rya.manual/src/site/markdown/loaddata.md
+++ /dev/null
@@ -1,142 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Load Data
-
-There are a few mechanisms to load data.
-
-## Web REST endpoint
-
-The WAR sets up a web REST endpoint at `http://server/web.rya/loadrdf` that accepts POSTed data and loads it into the RDF store. This short tutorial will use Java code to post data.
-
-First, you will need data to load and will need to figure out what format that data is in.
-
-For this sample, we will use the following N-Triples:
-
-```
-<http://mynamespace/ProductType1> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://mynamespace/ProductType> .
-<http://mynamespace/ProductType1> <http://www.w3.org/2000/01/rdf-schema#label> "Thing" .
-<http://mynamespace/ProductType1> <http://purl.org/dc/elements/1.1/publisher> <http://mynamespace/Publisher1> .
-```
-
-Save this file somewhere `$RDF_DATA`
-
-Second, use the following Java code to load data to the REST endpoint:
-
-``` JAVA
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.net.URL;
-import java.net.URLConnection;
-
-public class LoadDataServletRun {
-
-    public static void main(String[] args) {
-        try {
-            final InputStream resourceAsStream = Thread.currentThread().getContextClassLoader()
-                    .getResourceAsStream("$RDF_DATA");
-            URL url = new URL("http://server/web.rya/loadrdf" +
-                    "?format=N-Triples" +
-                    "");
-            URLConnection urlConnection = url.openConnection();
-            urlConnection.setRequestProperty("Content-Type", "text/plain");
-            urlConnection.setDoOutput(true);
-
-            final OutputStream os = urlConnection.getOutputStream();
-
-            int read;
-            while((read = resourceAsStream.read()) >= 0) {
-                os.write(read);
-            }
-            resourceAsStream.close();
-            os.flush();
-
-            BufferedReader rd = new BufferedReader(new InputStreamReader(
-                    urlConnection.getInputStream()));
-            String line;
-            while ((line = rd.readLine()) != null) {
-                System.out.println(line);
-            }
-            rd.close();
-            os.close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
-}
-```
-
-Compile and run the code above, changing the $RDF_DATA reference and the URL to point at wherever your RDF WAR is running.
-
-The default "format" is RDF/XML, but these formats are supported : RDFXML, NTRIPLES, TURTLE, N3, TRIX, TRIG.
-
-## Bulk Loading data
-
-Bulk loading data is done through MapReduce jobs.
-
-### Bulk Load RDF data
-
-This MapReduce job will read a full file into memory and parse it into statements. The statements are saved into the store. Here is an example for storing in Accumulo:
-
-```
-hadoop jar target/accumulo.rya-3.0.4-SNAPSHOT-shaded.jar mvm.rya.accumulo.mr.fileinput.BulkNtripsInputTool -Dac.zk=localhost:2181 -Dac.instance=accumulo -Dac.username=root -Dac.pwd=secret -Drdf.tablePrefix=triplestore_ -Dio.sort.mb=64 /tmp/temp.ntrips
-```
-
-Options:
-
-- rdf.tablePrefix : The tables (spo, po, osp) are prefixed with this qualifier. The tables become: (rdf.tablePrefix)spo,(rdf.tablePrefix)po,(rdf.tablePrefix)osp
-- ac.* : Accumulo connection parameters
-- rdf.format : See RDFFormat from openrdf, samples include (Trig, N-Triples, RDF/XML)
-- io.sort.mb : The higher the value, the faster the job runs; just remember that each mapper will need at least this much RAM
-
-The final argument is the directory/file to load. This file must be copied into HDFS before running the job.
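-
-For example, to copy the file into HDFS first (matching the path used in the command above):
-
-```
-hadoop fs -put /local/path/temp.ntrips /tmp/temp.ntrips
-```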
-
-## Direct OpenRDF API
-
-Here is some sample code to load data directly through the OpenRDF API. (Loading N-Triples data)
-You will need at least `accumulo.rya-<version>`, `rya.api`, and `rya.sail.impl` (plus their transitive dependencies) on the classpath. Maven is the easiest way to set up the project dependency tree.
-
-``` JAVA
-final RdfCloudTripleStore store = new RdfCloudTripleStore();
-AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-AccumuloRyaDAO dao = new AccumuloRyaDAO();
-Connector connector = new ZooKeeperInstance("instance", "zoo1,zoo2,zoo3").getConnector("user", "password");
-dao.setConnector(connector);
-conf.setTablePrefix("rya_");
-dao.setConf(conf);
-store.setRyaDAO(dao);
-
-Repository myRepository = new RyaSailRepository(store);
-myRepository.initialize();
-RepositoryConnection conn = myRepository.getConnection();
-
-//load data from file
-final File file = new File("ntriples.ntrips");
-conn.add(new FileInputStream(file), file.getName(),
-        RDFFormat.NTRIPLES, new Resource[]{});
-
-conn.commit();
-
-conn.close();
-myRepository.shutDown();
-```
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/5a03ef61/extras/rya.manual/src/site/markdown/overview.md
----------------------------------------------------------------------
diff --git a/extras/rya.manual/src/site/markdown/overview.md b/extras/rya.manual/src/site/markdown/overview.md
deleted file mode 100644
index 068bd57..0000000
--- a/extras/rya.manual/src/site/markdown/overview.md
+++ /dev/null
@@ -1,26 +0,0 @@
-
-<!--
-
-[comment]: # Licensed to the Apache Software Foundation (ASF) under one
-[comment]: # or more contributor license agreements.  See the NOTICE file
-[comment]: # distributed with this work for additional information
-[comment]: # regarding copyright ownership.  The ASF licenses this file
-[comment]: # to you under the Apache License, Version 2.0 (the
-[comment]: # "License"); you may not use this file except in compliance
-[comment]: # with the License.  You may obtain a copy of the License at
-[comment]: # 
-[comment]: #   http://www.apache.org/licenses/LICENSE-2.0
-[comment]: # 
-[comment]: # Unless required by applicable law or agreed to in writing,
-[comment]: # software distributed under the License is distributed on an
-[comment]: # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-[comment]: # KIND, either express or implied.  See the License for the
-[comment]: # specific language governing permissions and limitations
-[comment]: # under the License.
-
--->
-# Overview
-
-RYA is a scalable RDF store built on top of a columnar index store (such as Accumulo). It is implemented as an extension to OpenRDF to provide easy query mechanisms (SPARQL, SERQL, etc.) and RDF data storage (RDF/XML, N-Triples, etc.).
-
-RYA stands for RDF y(and) Accumulo.