You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@rya.apache.org by mi...@apache.org on 2015/12/22 17:49:56 UTC
[32/56] [abbrv] incubator-rya git commit: RYA-7 POM and License
Clean-up for Apache Move
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
new file mode 100644
index 0000000..b3e8dae
--- /dev/null
+++ b/extras/indexingExample/src/main/java/RyaDirectExample.java
@@ -0,0 +1,700 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+
+import java.util.List;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.MutationsRejectedException;
+import org.apache.accumulo.core.client.TableExistsException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.model.URI;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.LiteralImpl;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+import org.openrdf.sail.SailException;
+
+public class RyaDirectExample {
+ private static final Logger log = Logger.getLogger(RyaDirectExample.class);
+
+ //
+ // Connection configuration parameters
+ //
+
+ private static final boolean USE_MOCK_INSTANCE = true;
+ private static final boolean PRINT_QUERIES = true;
+ private static final String INSTANCE = "instance";
+ private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
+ private static final String AUTHS = "";
+
+
+
+ // Entry point: builds a (mock, by default) Accumulo-backed Rya Sail
+ // repository, pre-populates the PCJ index tables, then runs each indexed
+ // query example in sequence, logging the total elapsed time in seconds.
+ public static void main(String[] args) throws Exception {
+ Configuration conf = getConf();
+ conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+
+ log.info("Creating the tables as root.");
+// createTables(addRootConf(conf), conf);
+
+ SailRepository repository = null;
+ SailRepositoryConnection conn = null;
+
+ try {
+ log.info("Connecting to Indexing Sail Repository.");
+
+ Sail extSail = RyaSailFactory.getInstance(conf);
+ repository = new SailRepository(extSail);
+ repository.initialize();
+ conn = repository.getConnection();
+
+ // Seed sample statements and create the two pre-computed join (PCJ)
+ // index tables that the later *WithPCJ examples query against.
+ createPCJ(conn);
+
+ long start = System.currentTimeMillis();
+ log.info("Running SPARQL Example: Add and Delete");
+ testAddAndDelete(conn);
+ log.info("Running SAIL/SPARQL Example: PCJ Search");
+ testPCJSearch(conn);
+ log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
+ testAddAndTemporalSearchWithPCJ(conn);
+ log.info("Running SAIL/SPARQL Example: Add and Free Text Search with PCJ");
+ testAddAndFreeTextSearchWithPCJ(conn);
+ log.info("Running SPARQL Example: Add Point and Geo Search with PCJ");
+ testAddPointAndWithinSearchWithPCJ(conn);
+ log.info("Running SPARQL Example: Temporal, Freetext, and Geo Search");
+ testTemporalFreeGeoSearch(conn);
+ log.info("Running SPARQL Example: Geo, Freetext, and PCJ Search");
+ testGeoFreetextWithPCJSearch(conn);
+
+ log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
+ } finally {
+ // Always release the connection before the repository, even on failure.
+ log.info("Shutting down");
+ closeQuietly(conn);
+ closeQuietly(repository);
+ }
+ }
+
+ // Null-safe shutdown of the repository; swallows RepositoryException so
+ // cleanup in a finally block cannot mask an earlier, more useful failure.
+ private static void closeQuietly(SailRepository repository) {
+ if (repository != null) {
+ try {
+ repository.shutDown();
+ } catch (RepositoryException e) {
+ // quietly absorb this exception
+ }
+ }
+ }
+
+ // Null-safe close of the connection; swallows RepositoryException for the
+ // same reason as the repository overload above — best-effort cleanup.
+ private static void closeQuietly(SailRepositoryConnection conn) {
+ if (conn != null) {
+ try {
+ conn.close();
+ } catch (RepositoryException e) {
+ // quietly absorb this exception
+ }
+ }
+ }
+
+ // Builds the Rya/Accumulo configuration for the examples: enables the PCJ,
+ // geo, free-text, and temporal secondary indexes, points at the (mock)
+ // instance as root with an empty password, and sets the table prefix and
+ // scan authorizations used throughout this class.
+ private static Configuration getConf() {
+
+ AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+ conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
+ conf.set(ConfigUtils.USE_PCJ, "true");
+ conf.set(ConfigUtils.USE_GEO, "true");
+ conf.set(ConfigUtils.USE_FREETEXT, "true");
+ conf.set(ConfigUtils.USE_TEMPORAL, "true");
+ conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
+ conf.set(ConfigUtils.CLOUDBASE_USER, "root");
+ conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
+ conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
+ conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
+ conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
+
+ // only geo index statements with geo:asWKT predicates
+ conf.set(ConfigUtils.GEO_PREDICATES_LIST, GeoConstants.GEO_AS_WKT.stringValue());
+ return conf;
+ }
+
+ // Demonstrates SPARQL Update round-tripping: INSERT DATA two statements
+ // about the same subject into a named graph, verify a SELECT sees both,
+ // then DELETE DATA the same statements and verify the count drops to zero.
+ public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
+ RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
+ AccumuloException, AccumuloSecurityException, TableNotFoundException {
+
+ // Add data
+ String query = "INSERT DATA\n"//
+ + "{ GRAPH <http://updated/test> {\n"//
+ + " <http://acme.com/people/Mike> " //
+ + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
+ + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
+
+ log.info("Performing Query");
+
+ Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+ update.execute();
+
+ // Both inserted statements should now be visible in the named graph.
+ query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+ CountingResultHandler resultHandler = new CountingResultHandler();
+ TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ tupleQuery.evaluate(resultHandler);
+ log.info("Result count : " + resultHandler.getCount());
+
+ Validate.isTrue(resultHandler.getCount() == 2);
+ resultHandler.resetCount();
+
+ // Delete Data
+ query = "DELETE DATA\n" //
+ + "{ GRAPH <http://updated/test> {\n"
+ + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
+ + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+
+ update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+ update.execute();
+
+ // Re-run the same SELECT; the graph should now be empty for this subject.
+ query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ tupleQuery.evaluate(resultHandler);
+ log.info("Result count : " + resultHandler.getCount());
+
+ Validate.isTrue(resultHandler.getCount() == 0);
+ }
+
+
+ // Runs the two query shapes that match the PCJ tables built in createPCJ:
+ // the "?e a ?c" form (expects 1 result) and the reversed "?c a ?e" form
+ // (expects 2 results), asserting on the counts.
+ private static void testPCJSearch(SailRepositoryConnection conn) throws Exception {
+
+ String queryString;
+ TupleQuery tupleQuery;
+ CountingResultHandler tupleHandler;
+
+ // ///////////// search for bob
+ queryString = "SELECT ?e ?c ?l ?o " //
+ + "{" //
+ + " ?e a ?c . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+
+ // ///////////// search for bob
+ // NOTE(review): the fts: prefix below is declared but unused, and the
+ // comment says "bob" though the pattern is the reversed "?c a ?e" form
+ // matching queryString1 in createPCJ — presumably copy/paste; confirm.
+ queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?e ?c ?l ?o " //
+ + "{" //
+ + " ?c a ?e . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 2);
+
+ }
+
+
+
+
+ // Inserts one time:Instant with eight inXSDDateTime values (mixed literal
+ // forms: plain, typed, quoted, and date-only), then runs three temporal
+ // queries using the tempo:after filter — alone, joined with the type
+ // triple, and joined with the PCJ pattern — each expecting 5 results
+ // (the five values after 2001-01-01T01:01:03-08:00).
+ private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+
+ // create some resources and literals to make statements out of
+
+ String sparqlInsert = "PREFIX time: <http://www.w3.org/2006/time#>\n"
+ + "INSERT DATA {\n" //
+ + "_:eventz a time:Instant ;\n"
+ + " time:inXSDDateTime '2001-01-01T01:01:01-08:00' ;\n" // one second
+ + " time:inXSDDateTime '2001-01-01T04:01:02.000-05:00'^^<http://www.w3.org/2001/XMLSchema#dateTime> ;\n" // 2 seconds
+ + " time:inXSDDateTime \"2001-01-01T01:01:03-08:00\" ;\n" // 3 seconds
+ + " time:inXSDDateTime '2001-01-01T01:01:04-08:00' ;\n" // 4 seconds
+ + " time:inXSDDateTime '2001-01-01T09:01:05Z' ;\n"
+ + " time:inXSDDateTime '2006-01-01' ;\n"
+ + " time:inXSDDateTime '2007-01-01' ;\n"
+ + " time:inXSDDateTime '2008-01-01' ; .\n"
+ + "}";
+
+ Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
+ update.execute();
+
+ // Find all stored dates.
+ String queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+ + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+ + "SELECT ?event ?time \n" //
+ + "WHERE { \n"
+ + " ?event time:inXSDDateTime ?time . \n"//
+ + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+ + "}";//
+
+
+
+ TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ CountingResultHandler tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 5);
+
+ // Find all stored dates, joined with the instant's rdf:type triple.
+ queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+ + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+ + "SELECT ?event ?time \n" //
+ + "WHERE { \n"
+ + " ?event time:inXSDDateTime ?time . \n"//
+ + " ?event a time:Instant . \n"//
+ + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+ + "}";//
+
+
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 5);
+
+
+ // Find all stored dates, additionally joined with the PCJ pattern
+ // (?e a ?c / label / talksTo) seeded by createPCJ.
+ queryString = "PREFIX time: <http://www.w3.org/2006/time#> \n"//
+ + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> \n"//
+ + "SELECT ?event ?time ?e ?c ?l ?o \n" //
+ + "WHERE { \n"
+ + " ?e a ?c . \n"//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . \n"//
+ + " ?e <uri:talksTo> ?o . \n"//
+ + " ?event a time:Instant . \n"//
+ + " ?event time:inXSDDateTime ?time . \n"//
+ + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 5);
+ }
+
+
+
+
+
+
+ // Adds two Person resources with free-text-indexed labels ("Alice Palace
+ // Hose", "Bob Snob Hose"), then exercises the fts:text filter function:
+ // prefix match, OR + wildcard, conjunction of two filters, and negation —
+ // the last joined with the PCJ pattern.
+ private static void testAddAndFreeTextSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+ // add data to the repository using the SailRepository add methods
+ ValueFactory f = conn.getValueFactory();
+ URI person = f.createURI("http://example.org/ontology/Person");
+
+ String uuid;
+
+ uuid = "urn:people:alice";
+ conn.add(f.createURI(uuid), RDF.TYPE, person);
+ conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Alice Palace Hose", f.createURI("xsd:string")));
+
+ uuid = "urn:people:bobss";
+ conn.add(f.createURI(uuid), RDF.TYPE, person);
+ conn.add(f.createURI(uuid), RDFS.LABEL, f.createLiteral("Bob Snob Hose", "en"));
+
+ String queryString;
+ TupleQuery tupleQuery;
+ CountingResultHandler tupleHandler;
+
+ // ///////////// search for alice
+ queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?person ?match ?e ?c ?l ?o " //
+ + "{" //
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " FILTER(fts:text(?match, \"pal*\")) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+
+
+ // ///////////// search for alice and bob
+ queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?person ?match " //
+ + "{" //
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " ?person a <http://example.org/ontology/Person> . "//
+ + " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 2);
+
+ // ///////////// search for alice and bob, then narrow with a second
+ // filter — only Alice's label matches both, so the count drops to 1.
+ queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?person ?match " //
+ + "{" //
+ + " ?person a <http://example.org/ontology/Person> . "//
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " FILTER(fts:text(?match, \"(alice | bob) *SE\")) " //
+ + " FILTER(fts:text(?match, \"pal*\")) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+
+
+ // ///////////// search for bob (negated "alice"), joined with the PCJ
+ // pattern seeded by createPCJ.
+ queryString = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?person ?match ?e ?c ?l ?o " //
+ + "{" //
+ + " ?e a ?c . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?person a <http://example.org/ontology/Person> . "//
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " FILTER(fts:text(?match, \"!alice & hose\")) " //
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+ }
+
+
+
+ // Inserts a geo:Feature with a WKT point near Washington, D.C.
+ // (-77.035, 38.889), then runs geof:sfWithin searches with polygons that
+ // exclude or include the point, with and without the PCJ join pattern.
+ private static void testAddPointAndWithinSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
+
+ String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "INSERT DATA { " //
+ + " <urn:feature> a geo:Feature ; " //
+ + " geo:hasGeometry [ " //
+ + " a geo:Point ; " //
+ + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
+ + " ] . " //
+ + "}";
+
+ Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
+ u.execute();
+
+ String queryString;
+ TupleQuery tupleQuery;
+ CountingResultHandler tupleHandler;
+
+ // point outside search ring
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt " //
+ + "{" //
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 0);
+
+ // point inside search ring
+ // NOTE(review): "?o" abuts the following "{" with no space in the
+ // concatenated SELECT clause here and in the next query — confirm the
+ // SPARQL parser tolerates "?o{".
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
+ + "{" //
+ + " ?feature a ?e . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+
+
+ // point inside search ring with Pre-Computed Join
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt ?e ?l ?o" //
+ + "{" //
+ + " ?feature a ?e . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from during previous runs
+
+ // point outside search ring with PCJ
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt ?e ?l ?o " //
+ + "{" //
+ + " ?feature a ?e . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 0);
+
+ // point inside search ring with different Pre-Computed Join
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o " //
+ + "{" //
+ + " ?e a ?c . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+ }
+
+
+ // Combines all three secondary indexes in a single query: a temporal
+ // tempo:after filter, a geof:sfWithin polygon filter, and an fts:text
+ // free-text filter, expecting 5 rows (the cross of the 5 qualifying
+ // instants with the single matching feature/person from earlier methods).
+ private static void testTemporalFreeGeoSearch(SailRepositoryConnection conn) throws MalformedQueryException,
+ RepositoryException, UpdateExecutionException, TupleQueryResultHandlerException, QueryEvaluationException {
+
+
+ String queryString;
+ TupleQuery tupleQuery;
+ CountingResultHandler tupleHandler;
+
+ // ring containing point
+ // NOTE(review): "?match" abuts the following "{" with no space in the
+ // concatenated string ("?match{") — confirm the parser accepts this.
+ queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "PREFIX time: <http://www.w3.org/2006/time#> "//
+ + "PREFIX tempo: <tag:rya-rdf.org,2015:temporal#> "//
+ + "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "SELECT ?feature ?point ?wkt ?event ?time ?person ?match" //
+ + "{" //
+ + " ?event a time:Instant . \n"//
+ + " ?event time:inXSDDateTime ?time . \n"//
+ + " FILTER(tempo:after(?time, '2001-01-01T01:01:03-08:00') ) \n"// after 3 seconds
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)). " //
+ + " ?person a <http://example.org/ontology/Person> . "//
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " FILTER(fts:text(?match, \"pal*\")) " //
+ + "}";//
+
+
+
+ tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+ tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 5);
+
+ }
+
+
+
+ // Combines the free-text filter (negated "alice"), the PCJ join pattern,
+ // and a geof:sfWithin polygon that contains the stored point; the single
+ // matching combination yields exactly one row.
+ private static void testGeoFreetextWithPCJSearch(SailRepositoryConnection conn) throws MalformedQueryException,
+ RepositoryException, TupleQueryResultHandlerException, QueryEvaluationException {
+ // ring outside point
+ String queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
+ + "PREFIX fts: <http://rdf.useekm.com/fts#> "//
+ + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
+ + "SELECT ?feature ?point ?wkt ?e ?c ?l ?o ?person ?match " //
+ + "{" //
+ + " ?person a <http://example.org/ontology/Person> . "//
+ + " ?person <http://www.w3.org/2000/01/rdf-schema#label> ?match . "//
+ + " FILTER(fts:text(?match, \"!alice & hose\")) " //
+ + " ?e a ?c . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + " ?feature a geo:Feature . "//
+ + " ?feature geo:hasGeometry ?point . "//
+ + " ?point a geo:Point . "//
+ + " ?point geo:asWKT ?wkt . "//
+ + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+ + "}";//
+ TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+ CountingResultHandler tupleHandler = new CountingResultHandler();
+ tupleQuery.evaluate(tupleHandler);
+ log.info("Result count : " + tupleHandler.getCount());
+ Validate.isTrue(tupleHandler.getCount() == 1);
+ }
+
+
+
+ // Seeds the sample statements the PCJ queries match, creates two Accumulo
+ // index tables on a MockInstance, and materializes each query's results
+ // into its table via AccumuloIndexSet.
+ private static void createPCJ(SailRepositoryConnection conn)
+ throws RepositoryException, AccumuloException, AccumuloSecurityException, TableExistsException {
+
+ // Reversed pattern (?c a ?e) — matched by the second testPCJSearch query.
+ String queryString1 = ""//
+ + "SELECT ?e ?c ?l ?o " //
+ + "{" //
+ + " ?c a ?e . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + "}";//
+
+ // Standard pattern (?e a ?c) — matched by the other *WithPCJ examples.
+ String queryString2 = ""//
+ + "SELECT ?e ?c ?l ?o " //
+ + "{" //
+ + " ?e a ?c . "//
+ + " ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
+ + " ?e <uri:talksTo> ?o . "//
+ + "}";//
+
+
+ URI obj,subclass,talksTo;
+ URI person = new URIImpl("urn:people:alice");
+ URI feature = new URIImpl("urn:feature");
+ URI sub = new URIImpl("uri:entity");
+ subclass = new URIImpl("uri:class");
+ obj = new URIImpl("uri:obj");
+ talksTo = new URIImpl("uri:talksTo");
+
+ conn.add(person, RDF.TYPE, sub);
+ conn.add(feature, RDF.TYPE, sub);
+ conn.add(sub, RDF.TYPE, subclass);
+ conn.add(sub, RDFS.LABEL, new LiteralImpl("label"));
+ conn.add(sub, talksTo, obj);
+
+ // NOTE(review): ais1/ais2 are assigned but never read afterwards — the
+ // AccumuloIndexSet constructor presumably populates the table as a side
+ // effect; confirm, otherwise these locals are dead.
+ AccumuloIndexSet ais1 = null;
+ AccumuloIndexSet ais2 = null;
+ String tablename1 = RYA_TABLE_PREFIX + "INDEX_1";
+ String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
+
+ Connector accCon = new MockInstance(INSTANCE).getConnector("root", new PasswordToken("".getBytes()));
+ accCon.tableOperations().create(tablename1);
+ accCon.tableOperations().create(tablename2);
+
+ // NOTE(review): all five exception types below are printed and swallowed,
+ // so a failed PCJ build leaves the example running with empty index
+ // tables; later Validate calls would then fail with no obvious cause.
+ try {
+ ais1 = new AccumuloIndexSet(queryString1, conn, accCon, tablename1);
+ ais2 = new AccumuloIndexSet(queryString2, conn, accCon, tablename2);
+ } catch (MalformedQueryException e) {
+ e.printStackTrace();
+ } catch (SailException e) {
+ e.printStackTrace();
+ } catch (QueryEvaluationException e) {
+ e.printStackTrace();
+ } catch (MutationsRejectedException e) {
+ e.printStackTrace();
+ } catch (TableNotFoundException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+
+ // TupleQueryResultHandler that counts solutions (and echoes each binding
+ // set to stdout). Used by every example to assert result counts; reusable
+ // across queries via resetCount(). Not thread-safe.
+ private static class CountingResultHandler implements TupleQueryResultHandler {
+ // Number of solutions seen since construction or the last resetCount().
+ private int count = 0;
+
+ public int getCount() {
+ return count;
+ }
+
+ public void resetCount() {
+ this.count = 0;
+ }
+
+ @Override
+ public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
+ }
+
+ @Override
+ public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
+ count++;
+ System.out.println(arg0);
+ }
+
+ @Override
+ public void endQueryResult() throws TupleQueryResultHandlerException {
+ }
+
+ @Override
+ public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
+ // TODO Auto-generated method stub
+
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
new file mode 100644
index 0000000..a89e3d1
--- /dev/null
+++ b/extras/indexingExample/src/main/scripts/RunRyaDirectExample.bat
@@ -0,0 +1,41 @@
+@echo off
+rem Licensed to the Apache Software Foundation (ASF) under one
+rem or more contributor license agreements. See the NOTICE file
+rem distributed with this work for additional information
+rem regarding copyright ownership. The ASF licenses this file
+rem to you under the Apache License, Version 2.0 (the
+rem "License"); you may not use this file except in compliance
+rem with the License. You may obtain a copy of the License at
+rem
+rem http://www.apache.org/licenses/LICENSE-2.0
+rem
+rem Unless required by applicable law or agreed to in writing,
+rem software distributed under the License is distributed on an
+rem "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+rem KIND, either express or implied. See the License for the
+rem specific language governing permissions and limitations
+rem under the License.
+REM Builds a classpath from every jar under .\lib, then compiles and runs
+REM RyaDirectExample against it. Fails with a message if javac is not found.
+SET CP=
+
+REM Check to see if javac is on the path
+where /Q javac
+IF %ERRORLEVEL% NEQ 0 goto :NO_JAVAC
+
+
+REM Append each jar in .\lib to CP via the :append subroutine below.
+for /f %%f in ('DIR /b .\lib\*.jar') do call :append .\lib\%%f
+
+javac -cp "%CP%" RyaDirectExample.java
+java -cp "%CP%" RyaDirectExample
+
+goto :end
+
+REM Subroutine: appends %1 plus ';' to CP. The trailing "goto :end" jumps to
+REM the final label and falls off the end of the file, which returns from the
+REM call (the idiomatic form would be "goto :eof" — behavior is the same here).
+:append
+@echo off
+SET CP=%CP%%1;
+goto :end
+
+:NO_JAVAC
+echo ERROR: Could not find javac
+goto :end
+
+:end
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/pom.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/pom.xml b/extras/indexingSailExample/pom.xml
deleted file mode 100644
index d126457..0000000
--- a/extras/indexingSailExample/pom.xml
+++ /dev/null
@@ -1,80 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <parent>
- <groupId>mvm.rya</groupId>
- <artifactId>rya.extras</artifactId>
- <version>3.2.10-SNAPSHOT</version>
- </parent>
-
- <modelVersion>4.0.0</modelVersion>
- <name>${project.groupId}.${project.artifactId}</name>
- <artifactId>rya.indexingSail.example</artifactId>
-
- <dependencies>
- <dependency>
- <groupId>mvm.rya</groupId>
- <artifactId>rya.prospector</artifactId>
- </dependency>
-
-
- <dependency>
- <groupId>mvm.rya</groupId>
- <artifactId>mongodb.rya</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>mvm.rya</groupId>
- <artifactId>rya.indexing</artifactId>
- <version>${project.version}</version>
- </dependency>
- <dependency>
- <groupId>mvm.rya</groupId>
- <artifactId>rya.indexing</artifactId>
- <classifier>accumulo-server</classifier>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>mvm.rya</groupId>
- <artifactId>rya.indexing</artifactId>
- <classifier>map-reduce</classifier>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.accumulo</groupId>
- <artifactId>accumulo-core</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.thrift</groupId>
- <artifactId>libthrift</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.locationtech.geomesa</groupId>
- <artifactId>geomesa-distributed-runtime</artifactId>
- <version>${geomesa.version}</version>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.4</version>
- <configuration>
- <descriptors>
- <descriptor>src/main/assembly/assembly.xml</descriptor>
- </descriptors>
- </configuration>
- <executions>
- <execution>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/assembly/assembly.xml b/extras/indexingSailExample/src/main/assembly/assembly.xml
deleted file mode 100644
index 047ea5f..0000000
--- a/extras/indexingSailExample/src/main/assembly/assembly.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
-
- <id>distribution</id>
- <formats>
- <format>zip</format>
- </formats>
- <includeBaseDirectory>false</includeBaseDirectory>
-
- <dependencySets>
- <dependencySet>
- <outputDirectory>accumulo/lib/ext</outputDirectory>
- <includes>
- <include>mvm.rya:rya.indexing:*:accumulo-server</include>
- <include>org.locationtech.geomesa:geomesa-distributed-runtime:*</include>
- </includes>
- </dependencySet>
- <dependencySet>
- <outputDirectory>map-reduce</outputDirectory>
- <includes>
- <include>mvm.rya:rya.indexing:*:map-reduce</include>
- </includes>
- </dependencySet>
- <dependencySet>
- <outputDirectory>dist/lib</outputDirectory>
- <includes>
- <include>*</include>
- </includes>
- <excludes>
- <!-- Do not include the example jar. Example batch script builds the example -->
- <exclude>mvm.rya:rya.indexingSail.example</exclude>
-
- <!-- Do not include the MR or Accumulo Server builds -->
- <exclude>mvm.rya:rya.indexing:*:accumulo-server</exclude>
- <exclude>mvm.rya:rya.indexing:*:map-reduce</exclude>
- </excludes>
- <scope>test</scope>
- </dependencySet>
- </dependencySets>
- <files>
- <file>
- <source>src/main/scripts/RunRyaDirectExample.bat</source>
- <outputDirectory>dist</outputDirectory>
- </file>
- <file>
- <source>src/main/java/RyaDirectExample.java</source>
- <outputDirectory>dist</outputDirectory>
- </file>
- </files>
-</assembly>
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/EntityDirectExample.java b/extras/indexingSailExample/src/main/java/EntityDirectExample.java
deleted file mode 100644
index 408c754..0000000
--- a/extras/indexingSailExample/src/main/java/EntityDirectExample.java
+++ /dev/null
@@ -1,292 +0,0 @@
-
-
-import java.util.List;
-
-import mvm.rya.accumulo.AccumuloRdfConfiguration;
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-
-import org.apache.accumulo.core.client.AccumuloException;
-import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-
-public class EntityDirectExample {
- private static final Logger log = Logger.getLogger(EntityDirectExample.class);
-
- //
- // Connection configuration parameters
- //
-
- private static final boolean USE_MOCK_INSTANCE = true;
- private static final boolean PRINT_QUERIES = true;
- private static final String INSTANCE = "instance";
- private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
- private static final String AUTHS = "U";
-
- public static void main(String[] args) throws Exception {
- Configuration conf = getConf();
- conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-
- log.info("Creating the tables as root.");
- SailRepository repository = null;
- SailRepositoryConnection conn = null;
-
- try {
- log.info("Connecting to Indexing Sail Repository.");
-
- Sail extSail = RyaSailFactory.getInstance(conf);
- repository = new SailRepository(extSail);
- repository.initialize();
- conn = repository.getConnection();
-
- log.info("Running SPARQL Example: Add and Delete");
- testAddAndDelete(conn);
- log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
- testAddAndTemporalSearchWithPCJ(conn);
-
- } finally {
- log.info("Shutting down");
- closeQuietly(conn);
- closeQuietly(repository);
- }
- }
-
- private static void closeQuietly(SailRepository repository) {
- if (repository != null) {
- try {
- repository.shutDown();
- } catch (RepositoryException e) {
- // quietly absorb this exception
- }
- }
- }
-
- private static void closeQuietly(SailRepositoryConnection conn) {
- if (conn != null) {
- try {
- conn.close();
- } catch (RepositoryException e) {
- // quietly absorb this exception
- }
- }
- }
-
-
-
-
-
- public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException,
- RepositoryException, UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException,
- AccumuloException, AccumuloSecurityException, TableNotFoundException {
-
- // Add data
- String query = "INSERT DATA\n"//
- + "{ GRAPH <http://updated/test> {\n"//
- + " <http://acme.com/people/Mike> " //
- + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
- + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
- log.info("Performing Query");
-
- Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
- update.execute();
-
- query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
- + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
- CountingResultHandler resultHandler = new CountingResultHandler();
- TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- tupleQuery.evaluate(resultHandler);
- log.info("Result count : " + resultHandler.getCount());
-
- Validate.isTrue(resultHandler.getCount() == 1);
- resultHandler.resetCount();
-
- //TODO delete currently not implemented in AccumuloRyaDAO for
-// // Delete Data
-// query = "DELETE DATA\n" //
-// + "{ GRAPH <http://updated/test> {\n"
-// + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
-// + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-//
-// update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
-// update.execute();
-//
-// query = "select ?x {GRAPH <http://updated/test> {?x <http://acme.com/actions/likes> \"A new book\" . "//
-// + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
-// tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
-// tupleQuery.evaluate(resultHandler);
-// log.info("Result count : " + resultHandler.getCount());
-//
-// Validate.isTrue(resultHandler.getCount() == 0);
- }
-
-
-
-
-
- private static void testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception {
-
- // create some resources and literals to make statements out of
-
- String sparqlInsert = "PREFIX pref: <http://www.model/pref#> \n"
- + "INSERT DATA {\n" //
- + "<urn:Bob> a pref:Person ;\n" //
- + " pref:hasProperty1 'property1' ;\n" // one second
- + " pref:hasProperty2 'property2' ;\n" // 2 seconds
- + " pref:hasProperty3 'property3' .\n" // 3 seconds
- + "<urn:Fred> a pref:Person ; \n" //
- + " pref:hasProperty4 'property4' ; \n" //
- + " pref:hasProperty5 'property5' ; \n" //
- + "}";
-
- Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
- update.execute();
-
- String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
- + "SELECT ?x ?z \n" //
- + "WHERE { \n"
- + " ?x a ?z. \n"
- + " ?x pref:hasProperty1 'property1' . \n"//
- + " ?x pref:hasProperty2 'property2' . \n"//
- + " ?x pref:hasProperty3 'property3' . \n"//
- + "}";//
-
-
-
- TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
- CountingResultHandler tupleHandler = new CountingResultHandler();
- tupleQuery.evaluate(tupleHandler);
- log.info("Result count : " + tupleHandler.getCount());
- Validate.isTrue(tupleHandler.getCount() == 1);
- Validate.isTrue(tupleHandler.getBsSize() == 2);
-
- queryString = "PREFIX pref: <http://www.model/pref#> \n" //
- + "SELECT ?x ?w ?z \n" //
- + "WHERE { \n"
- + " ?x a ?z. \n"
- + " ?x pref:hasProperty4 'property4' . \n"//
- + " ?x pref:hasProperty5 ?w . \n"//
- + "}";//
-
-
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
- tupleHandler = new CountingResultHandler();
- tupleQuery.evaluate(tupleHandler);
- log.info("Result count : " + tupleHandler.getCount());
- Validate.isTrue(tupleHandler.getCount() == 1);
- Validate.isTrue(tupleHandler.getBsSize() == 3);
-
-
- queryString = "PREFIX pref: <http://www.model/pref#> "
- + "SELECT ?v ?w ?x ?y ?z "
- + "WHERE { "
- + " ?w a ?z . "
- + " ?w pref:hasProperty1 ?v . "
- + " ?w pref:hasProperty2 'property2' . "
- + " ?w pref:hasProperty3 'property3' . "
- + " ?x a ?z . "
- + " ?x pref:hasProperty4 'property4' . "
- + " ?x pref:hasProperty5 ?y . "
- + "}";
-
-
-
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
- tupleHandler = new CountingResultHandler();
- tupleQuery.evaluate(tupleHandler);
- log.info("Result count : " + tupleHandler.getCount());
- Validate.isTrue(tupleHandler.getCount() == 1);
- Validate.isTrue(tupleHandler.getBsSize() == 5);
-
- }
-
-
- private static Configuration getConf() {
-
- AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
-
- conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
- conf.set(ConfigUtils.USE_ENTITY, "true");
- conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
- conf.set(ConfigUtils.ENTITY_TABLENAME, RYA_TABLE_PREFIX + "entity");
- conf.set(ConfigUtils.CLOUDBASE_USER, "root");
- conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
- conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
- conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
- conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
-
- return conf;
- }
-
-
- private static class CountingResultHandler implements TupleQueryResultHandler {
- private int count = 0;
- private int bindingSize = 0;
- private boolean bsSizeSet = false;
-
- public int getCount() {
- return count;
- }
-
- public int getBsSize() {
- return bindingSize;
- }
-
- public void resetBsSize() {
- bindingSize = 0;
- bsSizeSet = false;
- }
-
- public void resetCount() {
- this.count = 0;
- }
-
- @Override
- public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
- }
-
- @Override
- public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
- count++;
- if(!bsSizeSet) {
- bindingSize = arg0.size();
- bsSizeSet = true;
- }
- System.out.println(arg0);
- }
-
- @Override
- public void endQueryResult() throws TupleQueryResultHandlerException {
- }
-
- @Override
- public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
- // TODO Auto-generated method stub
-
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/80faf06d/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
deleted file mode 100644
index 3f02fb2..0000000
--- a/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
+++ /dev/null
@@ -1,288 +0,0 @@
-import java.util.List;
-
-import mvm.rya.api.RdfCloudTripleStoreConfiguration;
-import mvm.rya.indexing.RyaSailFactory;
-import mvm.rya.indexing.accumulo.ConfigUtils;
-import mvm.rya.mongodb.MongoDBRdfConfiguration;
-
-import org.apache.commons.lang.Validate;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.log4j.Logger;
-import org.openrdf.model.Namespace;
-import org.openrdf.query.BindingSet;
-import org.openrdf.query.MalformedQueryException;
-import org.openrdf.query.QueryEvaluationException;
-import org.openrdf.query.QueryLanguage;
-import org.openrdf.query.QueryResultHandlerException;
-import org.openrdf.query.TupleQuery;
-import org.openrdf.query.TupleQueryResultHandler;
-import org.openrdf.query.TupleQueryResultHandlerException;
-import org.openrdf.query.Update;
-import org.openrdf.query.UpdateExecutionException;
-import org.openrdf.repository.RepositoryException;
-import org.openrdf.repository.RepositoryResult;
-import org.openrdf.repository.sail.SailRepository;
-import org.openrdf.repository.sail.SailRepositoryConnection;
-import org.openrdf.sail.Sail;
-
-public class MongoRyaDirectExample {
- private static final Logger log = Logger.getLogger(MongoRyaDirectExample.class);
-
- //
- // Connection configuration parameters
- //
-
- private static final boolean PRINT_QUERIES = true;
- private static final String MONGO_DB = "rya";
- private static final String MONGO_COLL_PREFIX = "rya_";
-
- public static void main(String[] args) throws Exception {
- Configuration conf = getConf();
- conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
-
- SailRepository repository = null;
- SailRepositoryConnection conn = null;
- try {
- log.info("Connecting to Indexing Sail Repository.");
- Sail sail = RyaSailFactory.getInstance(conf);
- repository = new SailRepository(sail);
- repository.initialize();
- conn = repository.getConnection();
-
- long start = System.currentTimeMillis();
- log.info("Running SPARQL Example: Add and Delete");
- testAddAndDelete(conn);
- testAddAndDeleteNoContext(conn);
- testAddNamespaces(conn);
- testAddPointAndWithinSearch(conn);
-
- log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
- } finally {
- log.info("Shutting down");
- closeQuietly(conn);
- closeQuietly(repository);
- }
- }
-
- private static void testAddPointAndWithinSearch(SailRepositoryConnection conn) throws Exception {
-
- String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
- + "INSERT DATA { " //
- + " <urn:feature> a geo:Feature ; " //
- + " geo:hasGeometry [ " //
- + " a geo:Point ; " //
- + " geo:asWKT \"Point(-77.03524 38.889468)\"^^geo:wktLiteral "//
- + " ] . " //
- + "}";
-
- Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
- u.execute();
-
- String queryString;
- TupleQuery tupleQuery;
- CountingResultHandler tupleHandler;
-
- // ring containing point
- queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
- + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
- + "SELECT ?feature ?point ?wkt " //
- + "{" //
- + " ?feature a geo:Feature . "//
- + " ?feature geo:hasGeometry ?point . "//
- + " ?point a geo:Point . "//
- + " ?point geo:asWKT ?wkt . "//
- + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
- + "}";//
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
- tupleHandler = new CountingResultHandler();
- tupleQuery.evaluate(tupleHandler);
- log.info("Result count : " + tupleHandler.getCount());
- Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from during previous runs
-
- // ring outside point
- queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#> "//
- + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/> "//
- + "SELECT ?feature ?point ?wkt " //
- + "{" //
- + " ?feature a geo:Feature . "//
- + " ?feature geo:hasGeometry ?point . "//
- + " ?point a geo:Point . "//
- + " ?point geo:asWKT ?wkt . "//
- + " FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
- + "}";//
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
-
- tupleHandler = new CountingResultHandler();
- tupleQuery.evaluate(tupleHandler);
- log.info("Result count : " + tupleHandler.getCount());
- Validate.isTrue(tupleHandler.getCount() == 0);
- }
-
- private static void closeQuietly(SailRepository repository) {
- if (repository != null) {
- try {
- repository.shutDown();
- } catch (RepositoryException e) {
- // quietly absorb this exception
- }
- }
- }
-
- private static void closeQuietly(SailRepositoryConnection conn) {
- if (conn != null) {
- try {
- conn.close();
- } catch (RepositoryException e) {
- // quietly absorb this exception
- }
- }
- }
-
- private static Configuration getConf() {
-
- Configuration conf = new Configuration();
- conf.set(ConfigUtils.USE_MONGO, "true");
- conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
- conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, MONGO_DB);
- conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, MONGO_COLL_PREFIX);
- conf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
- conf.set(ConfigUtils.USE_GEO, "true");
- conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, MONGO_COLL_PREFIX);
-
- return conf;
- }
-
-
-
- public static void testAddAndDelete(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
- UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
- // Add data
- String query = "INSERT DATA\n"//
- + "{ GRAPH <http://updated/test> {\n"//
- + " <http://acme.com/people/Mike> " //
- + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
- + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "} }";
-
- log.info("Performing Query");
-
- Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
- update.execute();
-
- query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
- CountingResultHandler resultHandler = new CountingResultHandler();
- TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- tupleQuery.evaluate(resultHandler);
- log.info("Result count : " + resultHandler.getCount());
-
- Validate.isTrue(resultHandler.getCount() == 2);
-
- resultHandler.resetCount();
-
- // Delete Data
- query = "DELETE DATA\n" //
- + "{ GRAPH <http://updated/test> {\n"
- + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
- + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
-
- update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
- update.execute();
-
- query = "select ?p ?o { GRAPH <http://updated/test> {<http://acme.com/people/Mike> ?p ?o . }}";
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- tupleQuery.evaluate(resultHandler);
- log.info("Result count : " + resultHandler.getCount());
-
- Validate.isTrue(resultHandler.getCount() == 0);
- }
-
- public static void testAddNamespaces(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
- UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
- conn.setNamespace("rya", "http://rya.com");
- RepositoryResult<Namespace> results = conn.getNamespaces();
- for (Namespace space : results.asList()){
- System.out.println(space.getName() + ", " + space.getPrefix());
- }
- }
-
- public static void testAddAndDeleteNoContext(SailRepositoryConnection conn) throws MalformedQueryException, RepositoryException,
- UpdateExecutionException, QueryEvaluationException, TupleQueryResultHandlerException {
-
- // Add data
- String query = "INSERT DATA\n"//
- + "{ \n"//
- + " <http://acme.com/people/Mike> " //
- + " <http://acme.com/actions/likes> \"A new book\" ;\n"//
- + " <http://acme.com/actions/likes> \"Avocados\" .\n" + " }";
-
- log.info("Performing Query");
-
- Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
- update.execute();
-
- query = "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }";
- CountingResultHandler resultHandler = new CountingResultHandler();
- TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- tupleQuery.evaluate(resultHandler);
- log.info("Result count : " + resultHandler.getCount());
-
- Validate.isTrue(resultHandler.getCount() == 2);
-
- resultHandler.resetCount();
-
- // Delete Data
- query = "DELETE DATA\n" //
- + "{ \n"
- + " <http://acme.com/people/Mike> <http://acme.com/actions/likes> \"A new book\" ;\n"
- + " <http://acme.com/actions/likes> \"Avocados\" .\n" + "}";
-
- update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
- update.execute();
-
- query = "select ?p ?o { {<http://acme.com/people/Mike> ?p ?o . }}";
- tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
- tupleQuery.evaluate(resultHandler);
- log.info("Result count : " + resultHandler.getCount());
-
- Validate.isTrue(resultHandler.getCount() == 0);
- }
-
- private static class CountingResultHandler implements TupleQueryResultHandler {
- private int count = 0;
-
- public int getCount() {
- return count;
- }
-
- public void resetCount() {
- this.count = 0;
- }
-
- @Override
- public void startQueryResult(List<String> arg0) throws TupleQueryResultHandlerException {
- }
-
- @Override
- public void handleSolution(BindingSet arg0) throws TupleQueryResultHandlerException {
- count++;
- }
-
- @Override
- public void endQueryResult() throws TupleQueryResultHandlerException {
- }
-
- @Override
- public void handleBoolean(boolean arg0) throws QueryResultHandlerException {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void handleLinks(List<String> arg0) throws QueryResultHandlerException {
- // TODO Auto-generated method stub
-
- }
- }
-}