Posted to commits@rya.apache.org by mi...@apache.org on 2016/02/07 19:26:05 UTC

[02/16] incubator-rya git commit: RYA-32 Improve how metadata and values are written to Accumulo PCJ tables
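
For readers skimming the diff: the new PcjTablesIntegrationTests file added below exercises the PCJ table API directly. What follows is a minimal, hypothetical sketch (not part of the commit) of that usage. An existing Accumulo Connector is assumed, and the class name, method name, table prefix, and example values here are illustrative only; the PcjTables calls mirror those that appear verbatim in the diff.

    import java.util.Set;

    import org.apache.accumulo.core.client.Connector;
    import org.openrdf.model.impl.NumericLiteralImpl;
    import org.openrdf.model.impl.URIImpl;
    import org.openrdf.model.vocabulary.XMLSchema;
    import org.openrdf.query.BindingSet;
    import org.openrdf.query.impl.MapBindingSet;

    import com.google.common.collect.Sets;

    import mvm.rya.indexing.external.tupleSet.PcjTables;
    import mvm.rya.indexing.external.tupleSet.PcjTables.PcjTableNameFactory;
    import mvm.rya.indexing.external.tupleSet.PcjTables.ShiftVarOrderFactory;
    import mvm.rya.indexing.external.tupleSet.PcjTables.VariableOrder;

    // Hypothetical helper class, for illustration only.
    public class PcjUsageSketch {
        public static void createAndLoadPcj(Connector accumuloConn) throws Exception {
            String sparql = "SELECT ?name ?age { ?name <http://hasAge> ?age }";

            // Derive the Accumulo table name and the variable orders used to index results.
            String pcjTableName = new PcjTableNameFactory().makeTableName("demo_", "examplePcj");
            Set<VariableOrder> varOrders =
                    new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("name;age"));

            // Create the PCJ table; this stores the PcjMetadata the test below checks.
            PcjTables pcjs = new PcjTables();
            pcjs.createPcjTable(accumuloConn, pcjTableName, varOrders, sparql);

            // Write one precomputed result row to the table.
            MapBindingSet result = new MapBindingSet();
            result.addBinding("name", new URIImpl("http://Alice"));
            result.addBinding("age", new NumericLiteralImpl(14, XMLSchema.INTEGER));
            pcjs.addResults(accumuloConn, pcjTableName, Sets.<BindingSet>newHashSet(result));
        }
    }
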

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/c12f58f4/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
deleted file mode 100644
index bac9871..0000000
--- a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/ExternalProcessorTest.java
+++ /dev/null
@@ -1,1654 +0,0 @@
-package mvm.rya.indexing.external.tupleSet;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-
-import mvm.rya.indexing.external.ExternalProcessor;
-import mvm.rya.indexing.external.ExternalProcessor.BindingSetAssignmentCollector;
-import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
-import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet;
-
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.junit.Assert;
-import org.openrdf.query.algebra.Projection;
-import org.openrdf.query.algebra.QueryModelNode;
-import org.openrdf.query.algebra.StatementPattern;
-import org.openrdf.query.algebra.TupleExpr;
-import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
-import org.openrdf.query.algebra.helpers.StatementPatternCollector;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-import com.google.common.collect.Sets;
-
-
-
-
-
-
-public class ExternalProcessorTest {
-	
-	
-	
-	
-	private String queryString = ""//
-			+ "SELECT ?e ?c ?l ?o " //
-			+ "{" //
-			+ "  ?e a ?c . "//
-			+ "  ?c a ?l . "//
-			+ "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "//
-			+ "  ?e <uri:talksTo> ?o  "//
-			+ "}";//
-
-	private String indexSparqlString = ""//
-			+ "SELECT ?x ?y ?z " //
-			+ "{" //
-			+ "  ?x <http://www.w3.org/2000/01/rdf-schema#label> ?z. "//
-			+ "  ?x a ?y . "//
-			+ "  ?y a ?z  "//
-			+ "}";//
-	
-	
-	private String q1 = ""//
-			+ "SELECT ?e ?l ?c " //
-			+ "{" //
-			+ "  ?e a ?c . "//
-			+ "  ?c <http://www.w3.org/2000/01/rdf-schema#label> ?l. "//
-			+ "  ?l <uri:talksTo> ?e . "//
-			+ "}";//
-
-	private String q2 = ""//
-			+ "SELECT ?a ?t ?v  " //
-			+ "{" //
-			+ "  ?a a ?t . "//
-			+ "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?v . "//
-			+ "  ?v <uri:talksTo> ?a . "//
-			+ "}";//
-	
-	
-	
-	private String q5 = ""//
-			+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " //
-			+ "{" //
-			+ "  ?f a ?m ."//
-			+ "  ?e a ?l ."//
-			+ "  ?n a ?o ."//
-			+ "  ?a a ?h ."//
-			+ "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
-			+ "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
-			+ "  ?d <uri:talksTo> ?f . "//
-			+ "  ?c <uri:talksTo> ?e . "//
-			+ "  ?p <uri:talksTo> ?n . "//
-			+ "  ?r <uri:talksTo> ?a . "//
-			+ "}";//
-	
-	
-	
-	private String q7 = ""//
-			+ "SELECT ?s ?t ?u " //
-			+ "{" //
-			+ "  ?s a ?t ."//
-			+ "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
-			+ "  ?u <uri:talksTo> ?s . "//
-			+ "}";//
-	
-	
-	private String q8 = ""//
-			+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " //
-			+ "{" //
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
-			+ "  ?f a ?m ."//
-			+ "  ?p <uri:talksTo> ?n . "//
-			+ "  ?e a ?l ."//
-			+ "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
-			+ "  ?d <uri:talksTo> ?f . "//
-			+ "  ?c <uri:talksTo> ?e . "//
-			+ "  ?n a ?o ."//
-			+ "  ?a a ?h ."//
-			+ "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
-			+ "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?r <uri:talksTo> ?a . "//
-			+ "}";//
-	
-	
-	
-	
-	private String q11 = ""//
-			+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit " //
-			+ "{" //
-			+ "  ?w a ?t ."//
-			+ "  ?x a ?y ."//
-			+ "  ?duck a ?chicken ."//
-			+ "  ?pig a ?rabbit ."//
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
-			+ "  ?f a ?m ."//
-			+ "  ?p <uri:talksTo> ?n . "//
-			+ "  ?e a ?l ."//
-			+ "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
-			+ "  ?d <uri:talksTo> ?f . "//
-			+ "  ?c <uri:talksTo> ?e . "//
-			+ "  ?n a ?o ."//
-			+ "  ?a a ?h ."//
-			+ "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
-			+ "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?r <uri:talksTo> ?a . "//
-			+ "}";//
-	
-	
-	private String q12 = ""//
-			+ "SELECT ?b ?p ?dog ?cat " //
-			+ "{" //
-			+ "  ?b a ?p ."//
-			+ "  ?dog a ?cat. "//
-			+ "}";//
-	
-	
-	
-	private String q13 = ""//
-			+ "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r ?x ?y ?w ?t ?duck ?chicken ?pig ?rabbit ?dick ?jane ?betty " //
-			+ "{" //
-			+ "  ?w a ?t ."//
-			+ "  ?x a ?y ."//
-			+ "  ?duck a ?chicken ."//
-			+ "  ?pig a ?rabbit ."//
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
-			+ "  ?f a ?m ."//
-			+ "  ?p <uri:talksTo> ?n . "//
-			+ "  ?e a ?l ."//
-			+ "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
-			+ "  ?d <uri:talksTo> ?f . "//
-			+ "  ?c <uri:talksTo> ?e . "//
-			+ "  ?n a ?o ."//
-			+ "  ?a a ?h ."//
-			+ "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
-			+ "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?r <uri:talksTo> ?a . "//
-			+ "  ?dick <uri:talksTo> ?jane . "//
-			+ "  ?jane <uri:talksTo> ?betty . "//
-			+ "}";//
-	
-	
-	private String q14 = ""//
-			+ "SELECT ?harry ?susan ?mary " //
-			+ "{" //
-			+ "  ?harry <uri:talksTo> ?susan . "//
-			+ "  ?susan <uri:talksTo> ?mary . "//
-			+ "}";//
-
-
-	
-	String q15 = ""//
-			+ "SELECT ?a ?b ?c ?d ?e ?f ?q " //
-			+ "{" //
-			+ " GRAPH ?x { " //
-			+ "  ?a a ?b ."//
-			+ "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?d <uri:talksTo> ?e . "//
-			+ "  FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " //
-			+ "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
-			+ "  ?b a ?q ."//
-			+ "		}"//
-			+ "}";//
-	
-	
-	String q16 = ""//
-			+ "SELECT ?g ?h ?i " //
-			+ "{" //
-			+ " GRAPH ?y { " //
-			+ "  ?g a ?h ."//
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?i ."//
-			+ "		}"//
-			+ "}";//
-	
-	String q17 = ""//
-			+ "SELECT ?j ?k ?l ?m ?n ?o " //
-			+ "{" //
-			+ " GRAPH ?z { " //
-			+ "  ?j <uri:talksTo> ?k . "//
-			+ "  FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " //
-			+ "		}"//
-			+ "}";//
-	
-	String q18 = ""//
-			+ "SELECT ?r ?s ?t ?u " //
-			+ "{" //
-			+ " GRAPH ?q { " //
-			+ "  FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " //
-			+ "  ?t a ?u ."//
-			+ "		}"//
-			+ "}";//
-	
-	
-	
-	String q19 = ""//
-			+ "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " //
-			+ "{" //
-			+ " GRAPH ?x { " //
-			+ "  ?a a ?b ."//
-			+ "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
-			+ "  ?d <uri:talksTo> ?e . "//
-			+ "  FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " //
-			+ "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
-			+ "  FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
-			+ "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
-			+ "  ?b a ?q ."//
-			+ "		}"//
-			+ "}";//
-	
-	
-	String q20 = ""//
-			+ "SELECT ?m ?n " //
-			+ "{" //
-			+ " GRAPH ?q { " //
-			+ "  FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " //
-			+ "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
-			+ "		}"//
-			+ "}";//
-	
-	
-	String q21 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-			+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-			+ "SELECT ?feature ?point ?wkt " //
-			+ "{" //
-			+ "  ?feature a geo:Feature . "//
-			+ "  ?feature geo:hasGeometry ?point . "//
-			+ "  ?point a geo:Point . "//
-			+ "  ?point geo:asWKT ?wkt . "//
-			+ "  FILTER(geof:sfWithin(?wkt, \"Polygon\")) " //
-			+ "}";//
-	
-	
-	 String q22 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-             + "SELECT ?person ?commentmatch ?labelmatch" //
-             + "{" //
-             + "  ?person a <http://example.org/ontology/Person> . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?labelmatch . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?commentmatch . "//
-             + "  FILTER(fts:text(?labelmatch, \"bob\")) . " //
-             + "  FILTER(fts:text(?commentmatch, \"bob\"))  " //
-             + "}";//
-	 
-	 
-	 String q23 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-				+ "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-				+ "SELECT ?a ?b ?c " //
-				+ "{" //
-				+ "  ?a a geo:Feature . "//
-				+ "  ?b a geo:Point . "//
-				+ "  ?b geo:asWKT ?c . "//
-				+ "  FILTER(geof:sfWithin(?c, \"Polygon\")) " //
-				+ "}";//
-	 
-	 
-	 String q24 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-             + "SELECT ?f ?g " //
-             + "{" //
-             + "  ?f <http://www.w3.org/2000/01/rdf-schema#comment> ?g . "//
-             + "  FILTER(fts:text(?g, \"bob\"))  " //
-             + "}";//
-	 
-
-	 String q25 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-             + "SELECT ?person ?commentmatch ?labelmatch ?point" //
-             + "{" //
-             + "  ?person a ?point. " //
-             + "  ?person a <http://example.org/ontology/Person> . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?labelmatch . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?commentmatch . "//
-             + "  FILTER((?person > ?point) || (?person = ?labelmatch)). "
-             + "  FILTER(fts:text(?labelmatch, \"bob\")) . " //
-             + "  FILTER(fts:text(?commentmatch, \"bob\"))  " //
-             + "}";//
-	 
-	 
-	 String q26 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-             + "SELECT ?a ?b ?c  " //
-             + "{" //
-             + "  ?a a ?c. " //
-             + "  ?a a <http://example.org/ontology/Person> . "//
-             + "  ?a <http://www.w3.org/2000/01/rdf-schema#label> ?b . "//
-             + "  FILTER((?a > ?c) || (?a = ?b)). "
-             + "  FILTER(fts:text(?b, \"bob\")) . " //
-             + "}";//
-	 
-	 
-	 
-	 String q27 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-			 + "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
-			 + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
-             + "SELECT ?person ?commentmatch ?labelmatch ?other ?feature ?point ?wkt ?g ?h" //
-             + "{" //
-             + "  ?person a <http://example.org/ontology/Person> . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> ?labelmatch . "//
-             + "  ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?commentmatch . "//
-             + "  FILTER((?person > ?other) || (?person = ?labelmatch)). "
-             + "  ?person a ?other. "//
-             + "  FILTER(fts:text(?labelmatch, \"bob\")) . " //
-             + "  FILTER(fts:text(?commentmatch, \"bob\"))  " //
-             + " ?feature a geo:Feature . "//
-		     + "  ?point a geo:Point . "//
-			 + "  ?point geo:asWKT ?wkt . "//
-			 + "  FILTER(geof:sfWithin(?wkt, \"Polygon\")) " //
-			 + "  FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
-			 + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
-             + "}";//
-	 
-	 
-	 String q28 = ""//
-				+ "SELECT ?m ?n " //
-				+ "{" //
-				+ "  FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " //
-				+ "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
-				+ "}";//
-	 
-	 
-	 String q29 = ""//
-				+ "SELECT ?m ?n ?o" //
-				+ "{" //
-				+ "  FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " //
-				+ "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
-				+ "  ?m a ?o." //
-				+ "  FILTER(ISNUMERIC(?o))."
-				+ "}";//
-	 
-	 String q30 = ""//
-				+ "SELECT ?pig ?dog ?owl" //
-				+ "{" //
-				+ "  FILTER(?pig IN (1,2,3) && ?dog NOT IN(5,6,7)). " //
-				+ "  ?dog <http://www.w3.org/2000/01/rdf-schema#label> ?pig. "//
-				+ "  ?pig a ?owl. " //
-				+ "  FILTER(ISNUMERIC(?owl))."
-				+ "}";//
-	 
-	 
-	 String q31 = ""//
-	            + "SELECT ?q ?r ?s " //
-	            + "{" //
-	            + "  {?q a ?r} UNION {?r a ?s} ."//
-	            + "  ?r a ?s ."//
-	            + "}";// 
-	 
-	
-	 
-	 String q33 = ""//
-             + "SELECT ?q ?r ?s ?t " //
-             + "{" //
-             + "  OPTIONAL {?q a ?r} ."//
-             + "  ?s a ?t ."//
-             + "}";// 
-	 
-	 
-	 String q34 = ""//
-             + "SELECT ?q ?r  " //
-             + "{" //
-             + "  FILTER(?q > ?r) ."//
-             + "  ?q a ?r ."//
-             + "}";// 
-	 
-	 
-	 String q35 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-             + "SELECT ?s ?t ?u ?v ?w ?x ?y ?z " //
-             + "{" //
-             + "  FILTER(?s > ?t)."//
-             + "  ?s a ?t ."//
-             + "  FILTER(?u > ?v)."//
-             + "  ?u a ?v ."// 
-             + "  ?w <http://www.w3.org/2000/01/rdf-schema#label> ?x ."//
-             + "  FILTER(fts:text(?x, \"bob\")) . " //
-             + "  ?y <http://www.w3.org/2000/01/rdf-schema#label> ?z ."//
-             + "  FILTER(fts:text(?z, \"bob\")) . " //
-             + "}";// 
-	 
-	 
-	String q36 =  "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-	        + "SELECT ?dog ?cat  " //
-            + "{" //
-	        + "  ?dog <http://www.w3.org/2000/01/rdf-schema#label> ?cat ."//
-            + "  FILTER(fts:text(?cat, \"bob\")) . " //
-            + "}";// 
-	
-	 
-    String q37 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-            + "SELECT ?s ?t " //
-            + "{" //
-            + "  FILTER(?s > ?t)."//
-            + "  ?s a ?t ."//
-            + "  FILTER(?s > ?t)."//
-            + "  ?s a ?t ."// 
-             + "  FILTER(?s > ?t)."//
-            + "  ?s a ?t ."// 
-            + "}";// 
-            
-	
-	
-    String q38 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
-            + "SELECT ?s ?t " //
-            + "{" //
-            + "  FILTER(?s > ?t)."//
-            + "  ?s a ?t ."// 
-            + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?s ."//
-            + "  FILTER(?s > ?t)."//
-            + "}";// 
-    
-    
-    
-    String q39 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
-            + "SELECT ?s ?t " //
-            + "{" //
-            + " VALUES(?s) { (<ub:poodle>)(<ub:pitbull>)} ." //
-            + " ?t <ub:peesOn> <ub:rug> ." //
-            + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?s ."//
-            + "}";//
-
-    String q40 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
-            + "SELECT ?u ?v " //
-            + "{" //
-            + " ?v <ub:peesOn> <ub:rug> ." //
-            + " ?v <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
-            + "}";//
-
-    String q41 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
-            + "SELECT ?s ?t ?w ?x" //
-            + "{" //
-            + " FILTER(?s > ?t)."//
-            + " VALUES(?s) { (<ub:poodle>)(<ub:pitbull>)} ." //
-            + " VALUES(?w) { (<ub:persian>) (<ub:siamese>) } ." //
-            + " ?t <ub:peesOn> <ub:rug> ." //
-            + " ?t <http://www.w3.org/2000/01/rdf-schema#label> ?s ."//
-            + " ?w <ub:peesOn> <ub:rug> ." //
-            + " ?w <http://www.w3.org/2000/01/rdf-schema#label> ?x ."//
-            + "}";//
-
-    String q42 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
-            + "SELECT ?u ?v " //
-            + "{" //
-            + " FILTER(?u > ?v)."//
-            + " ?v <ub:peesOn> <ub:rug> ." //
-            + " ?v <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
-            + "}";//
-
-    String q43 = "PREFIX fts: <http://rdf.useekm.com/fts#> "//
-            + "SELECT ?a ?b " //
-            + "{" //
-            + " ?b <ub:peesOn> <ub:rug> ." //
-            + " ?b <http://www.w3.org/2000/01/rdf-schema#label> ?a ."//
-            + "}";//
-    
-	
-
-	
-	@Test
-	public void testVarRelableIndexSmaller() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(queryString, null);
-		ParsedQuery pq2 = parser2.parseQuery(indexSparqlString, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-
-		
-		Assert.assertTrue(qSet.containsAll(set) && set.size() != 0);
-
-	}
-	
-	
-	
-	@Test
-	public void testVarRelableIndexSameSize() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q1, null);
-		ParsedQuery pq2 = parser2.parseQuery(q2, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		
-		Assert.assertTrue(set.equals(qSet));
-
-
-	}
-	
-	
-	
-	
-	
-	@Test
-	public void testTwoIndexLargeQuery() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q11, null);
-		ParsedQuery pq2 = parser2.parseQuery(q7, null);
-		ParsedQuery pq3 = parser3.parseQuery(q12, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup1);
-		list.add(extTup2);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-	
-		
-		Assert.assertTrue(set.equals(qSet));
-
-
-	}
-	
-	
-	
-	@Test
-	public void testThreeIndexLargeQuery() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-		SPARQLParser parser4 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q13, null);
-		ParsedQuery pq2 = parser2.parseQuery(q5, null);
-		ParsedQuery pq3 = parser3.parseQuery(q12, null);
-		ParsedQuery pq4 = parser4.parseQuery(q14, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr());
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-		SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup1);
-		list.add(extTup2);
-		list.add(extTup3);
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		Assert.assertTrue(set.equals(qSet));
-
-	}
-	
-	
-	
-	
-	
-	
-	
-	
-	@Test
-	public void testSingleIndexLargeQuery() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q8, null);
-		ParsedQuery pq2 = parser2.parseQuery(q7, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		
-		Assert.assertTrue(set.equals(qSet));
-
-	}
-	
-	
-
-	
-	
-
-	@Test
-	public void testContextFilter() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-		SPARQLParser parser4 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q15, null);
-		ParsedQuery pq2 = parser2.parseQuery(q16, null);
-		ParsedQuery pq3 = parser3.parseQuery(q17, null);
-		ParsedQuery pq4 = parser4.parseQuery(q18, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr());
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-		SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup1);
-		list.add(extTup2);
-		list.add(extTup3);
-		
-		
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		Assert.assertTrue(qSet.containsAll(set) && eTupSet.size() == 1);
-	}
-	
-	
-	
-	
-
-	@Test
-	public void testContextFilterFourIndex() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-		SPARQLParser parser4 = new SPARQLParser();
-		SPARQLParser parser5 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q19, null);
-		ParsedQuery pq2 = parser2.parseQuery(q16, null);
-		ParsedQuery pq3 = parser3.parseQuery(q17, null);
-		ParsedQuery pq4 = parser4.parseQuery(q18, null);
-		ParsedQuery pq5 = parser5.parseQuery(q20, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr()+ " , " + pq3.getTupleExpr()+ " , " +pq4.getTupleExpr()+ " , " +pq5.getTupleExpr());
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-		SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));
-		SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet(new Projection(pq5.getTupleExpr()));
-		
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup1);
-		list.add(extTup2);
-		list.add(extTup3);
-		list.add(extTup4);
-
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		Assert.assertTrue(qSet.containsAll(set) && eTupSet.size() == 2);
-	}
-	
-	
-	
-	
-	@Test
-	public void testGeoIndexFunction() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q21, null);
-		ParsedQuery pq2 = parser2.parseQuery(q23, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		
-		Assert.assertTrue(qSet.containsAll(set) && set.size() != 0);
-
-	}
-	
-	
-	
-	@Test
-	public void testFreeTestIndexFunction() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q22, null);
-		ParsedQuery pq2 = parser2.parseQuery(q24, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		
-		
-		Assert.assertTrue(qSet.containsAll(set) && set.size() != 0);
-
-	}
-	
-	
-	@Test
-	public void testThreeIndexGeoFreeCompareFilterMix() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q25, null);
-		ParsedQuery pq2 = parser2.parseQuery(q24, null);
-		ParsedQuery pq3 = parser3.parseQuery(q26, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup1);
-		list.add(extTup2);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-	
-		
-		Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 2);
-
-
-	}
-	
-	
-	
-	
-	
-	@Test
-	public void testFourIndexGeoFreeCompareFilterMix() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-		SPARQLParser parser4 = new SPARQLParser();
-		SPARQLParser parser5 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q27, null);
-		ParsedQuery pq2 = parser2.parseQuery(q23, null);
-		ParsedQuery pq3 = parser3.parseQuery(q26, null);
-		ParsedQuery pq4 = parser4.parseQuery(q24, null);
-		ParsedQuery pq5 = parser5.parseQuery(q28, null);
-		
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr()+ " and " + pq5.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-		SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));
-		SimpleExternalTupleSet extTup4 = new SimpleExternalTupleSet(new Projection(pq5.getTupleExpr()));
-
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup4);
-		list.add(extTup1);
-		list.add(extTup2);
-		list.add(extTup3);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Assert.assertTrue(eTupSet.size() == 4);
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-	
-		
-		Assert.assertTrue(set.equals(qSet));
-
-
-
-	}
-	
-	
-	
-	
-	
-	@Test
-	public void testThreeIndexGeoFreeCompareFilterMix2() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-		SPARQLParser parser3 = new SPARQLParser();
-		SPARQLParser parser4 = new SPARQLParser();
-		
-
-		ParsedQuery pq1 = parser1.parseQuery(q27, null);
-		ParsedQuery pq2 = parser2.parseQuery(q23, null);
-		ParsedQuery pq3 = parser3.parseQuery(q26, null);
-		ParsedQuery pq4 = parser4.parseQuery(q28, null);
-		
-		
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-		SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));
-		
-
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-
-		list.add(extTup1);
-		list.add(extTup3);
-		list.add(extTup2);
-		
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Assert.assertTrue(eTupSet.size() == 3);
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-	
-		Assert.assertTrue(qSet.containsAll(set));
-	
-
-	}
-	
-	
-	
-	
-	
-	
-	
-	@Test
-	public void testISNUMERIC() throws Exception {
-
-		SPARQLParser parser1 = new SPARQLParser();
-		SPARQLParser parser2 = new SPARQLParser();
-
-		ParsedQuery pq1 = parser1.parseQuery(q29, null);
-		ParsedQuery pq2 = parser2.parseQuery(q30, null);
-
-		System.out.println("Query is " + pq1.getTupleExpr());
-		System.out.println("Index is " + pq2.getTupleExpr());
-
-		
-		SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-		
-
-		List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-		list.add(extTup);
-
-		
-		ExternalProcessor processor = new ExternalProcessor(list);
-		
-		TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-		System.out.println("Processed query is " + tup);
-		
-		
-		ExternalTupleVstor visitor = new ExternalTupleVstor();
-		tup.visit(visitor);
-		
-		StatementPatternCollector spc = new StatementPatternCollector();
-		pq1.getTupleExpr().visit(spc);
-		Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-		
-		
-		ExternalTupleVstor eTup = new ExternalTupleVstor();
-		tup.visit(eTup);
-		Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-		Set<StatementPattern> set = Sets.newHashSet();
-		for(QueryModelNode s: eTupSet) {
-			StatementPatternCollector spc1 = new StatementPatternCollector();
-			((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-			Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-			for(StatementPattern t: tempSet) {
-				set.add(t);
-			}
-			
-		}
-		
-		Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 1);
-		
-
-	}
-	
-	
-	@Test
-    public void testInvalidQueryUnion() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q31, null);
-        ParsedQuery pq2 = parser2.parseQuery(q31, null);
-
-        System.out.println("Query is " + pq1.getTupleExpr());
-        System.out.println("Index is " + pq2.getTupleExpr());
-
-        
-        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup);
-        
-        boolean thrown = false;
-
-        try {
-            ExternalProcessor processor = new ExternalProcessor(list);
-            processor.process(pq1.getTupleExpr());
-        } catch (IllegalArgumentException e) {
-            System.out.println(e);
-            thrown = true;
-        }
-
-        Assert.assertTrue(thrown);
-
-    }
-	
-	
-
-    
-	
-	@Test
-    public void testInvalidQueryOptional() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q33, null);
-        ParsedQuery pq2 = parser2.parseQuery(q33, null);
-
-        System.out.println("Query is " + pq1.getTupleExpr());
-        System.out.println("Index is " + pq2.getTupleExpr());
-
-        
-        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup);
-        
-        boolean thrown = false;
-
-        try {
-            ExternalProcessor processor = new ExternalProcessor(list);
-            processor.process(pq1.getTupleExpr());
-        } catch (IllegalArgumentException e) {
-            System.out.println(e);
-            thrown = true;
-        }
-
-        Assert.assertTrue(thrown);
-
-    }
-    
-	
-	
-	
-	@Test
-    public void testTwoRepeatedIndex() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-        SPARQLParser parser3 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q35, null);
-        ParsedQuery pq2 = parser2.parseQuery(q34, null);
-        ParsedQuery pq3 = parser3.parseQuery(q36, null);
-
-        System.out.println("Query is " + pq1.getTupleExpr());
-        System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr());
-
-        
-        SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup1);
-        list.add(extTup2);
-
-        
-        ExternalProcessor processor = new ExternalProcessor(list);
-        
-        TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-        System.out.println("Processed query is " + tup);
-        
-        
-        
-        ExternalTupleVstor visitor = new ExternalTupleVstor();
-        tup.visit(visitor);
-        
-        StatementPatternCollector spc = new StatementPatternCollector();
-        pq1.getTupleExpr().visit(spc);
-        Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-        
-        
-        ExternalTupleVstor eTup = new ExternalTupleVstor();
-        tup.visit(eTup);
-        Set<QueryModelNode> eTupSet =  eTup.getExtTup();
-        Set<StatementPattern> set = Sets.newHashSet();
-        for(QueryModelNode s: eTupSet) {
-            StatementPatternCollector spc1 = new StatementPatternCollector();
-            ((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-            Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-            for(StatementPattern t: tempSet) {
-                set.add(t);
-            }
-            
-        }
-    
-        
-        Assert.assertTrue(set.equals(qSet) && eTupSet.size()==4);
-
-
-    }
-    
-	
-	
-	@Test
-    public void testRepeatedStatementPatternQuery() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q37, null);
-        ParsedQuery pq2 = parser2.parseQuery(q34, null);
-
-        System.out.println("Query is " + pq1.getTupleExpr());
-        System.out.println("Index is " + pq2.getTupleExpr());
-
-        
-        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup);
-        
-        boolean thrown = false;
-
-        try {
-            ExternalProcessor processor = new ExternalProcessor(list);
-            processor.process(pq1.getTupleExpr());
-        } catch (IllegalArgumentException e) {
-            System.out.println(e);
-            thrown = true;
-        }
-
-        Assert.assertTrue(thrown);
-    }
-	
-	
-	
-	
-
- 
-    
-    
-	@Test
-    public void testRepeatedFilterQuery() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q38, null);
-        ParsedQuery pq2 = parser2.parseQuery(q38, null);
-
-        System.out.println("Query is " + pq1.getTupleExpr());
-        System.out.println("Index is " + pq2.getTupleExpr());
-
-        
-        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup);
-        
-        boolean thrown = false;
-
-        try {
-            ExternalProcessor processor = new ExternalProcessor(list);
-            processor.process(pq1.getTupleExpr());
-        } catch (IllegalArgumentException e) {
-            System.out.println(e);
-            thrown = true;
-        }
-
-        Assert.assertTrue(thrown);
-    }
-	
-	
-
-	
-    @Test
-    public void testBindingSetAssignment1() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q39, null);
-        ParsedQuery pq2 = parser2.parseQuery(q40, null);
-
-        SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-
-        list.add(extTup1);
-
-        ExternalProcessor processor = new ExternalProcessor(list);
-
-        TupleExpr tup = processor.process(pq1.getTupleExpr());
-
-        ExternalTupleVstor visitor = new ExternalTupleVstor();
-        tup.visit(visitor);
-
-        StatementPatternCollector spc = new StatementPatternCollector();
-        pq1.getTupleExpr().visit(spc);
-        Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-
-        ExternalTupleVstor eTup = new ExternalTupleVstor();
-        tup.visit(eTup);
-        Set<QueryModelNode> eTupSet = eTup.getExtTup();
-        Set<StatementPattern> set = Sets.newHashSet();
-        for (QueryModelNode s : eTupSet) {
-            StatementPatternCollector spc1 = new StatementPatternCollector();
-            ((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-            Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-            for (StatementPattern t : tempSet) {
-                set.add(t);
-            }
-
-            Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 1);
-
-            BindingSetAssignmentCollector bsac1 = new BindingSetAssignmentCollector();
-            BindingSetAssignmentCollector bsac2 = new BindingSetAssignmentCollector();
-            pq1.getTupleExpr().visit(bsac1);
-            tup.visit(bsac2);
-
-            Assert.assertTrue(bsac1.getBindingSetAssignments().equals(bsac2.getBindingSetAssignments()));
-
-        }
-    }
-    
-    
-    @Test
-    public void testBindingSetAssignment2() throws Exception {
-
-        SPARQLParser parser1 = new SPARQLParser();
-        SPARQLParser parser2 = new SPARQLParser();
-        SPARQLParser parser3 = new SPARQLParser();
-
-        ParsedQuery pq1 = parser1.parseQuery(q41, null);
-        ParsedQuery pq2 = parser2.parseQuery(q42, null);
-        ParsedQuery pq3 = parser2.parseQuery(q43, null);
-
-        SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
-        SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
-
-        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
-        list.add(extTup1);
-        list.add(extTup2);
-
-        ExternalProcessor processor = new ExternalProcessor(list);
-        TupleExpr tup = processor.process(pq1.getTupleExpr());
-        System.out.println("Processed query is " + tup);
-
-        ExternalTupleVstor visitor = new ExternalTupleVstor();
-        tup.visit(visitor);
-
-        StatementPatternCollector spc = new StatementPatternCollector();
-        pq1.getTupleExpr().visit(spc);
-        Set<StatementPattern> qSet = Sets.newHashSet(spc.getStatementPatterns());
-        ExternalTupleVstor eTup = new ExternalTupleVstor();
-        tup.visit(eTup);
-        Set<QueryModelNode> eTupSet = eTup.getExtTup();
-        Set<StatementPattern> set = Sets.newHashSet();
-        for (QueryModelNode s : eTupSet) {
-            StatementPatternCollector spc1 = new StatementPatternCollector();
-            ((ExternalTupleSet) s).getTupleExpr().visit(spc1);
-            Set<StatementPattern> tempSet = Sets.newHashSet(spc1.getStatementPatterns());
-            for (StatementPattern t : tempSet) {
-                set.add(t);
-            }
-        }
-
-        Assert.assertTrue(set.equals(qSet) && eTupSet.size() == 2);
-
-        BindingSetAssignmentCollector bsac1 = new BindingSetAssignmentCollector();
-        BindingSetAssignmentCollector bsac2 = new BindingSetAssignmentCollector();
-        pq1.getTupleExpr().visit(bsac1);
-        tup.visit(bsac2);
-
-        Assert.assertTrue(bsac1.getBindingSetAssignments().equals(bsac2.getBindingSetAssignments()));
-
-    }
-    
-    
-	
-    public static class ExternalTupleVstor extends QueryModelVisitorBase<RuntimeException> {
-
-        private Set<QueryModelNode> eSet = new HashSet<QueryModelNode>();
-
-        @Override
-        public void meetNode(QueryModelNode node) throws RuntimeException {
-            if (node instanceof ExternalTupleSet) {
-                eSet.add(node);
-            }
-            super.meetNode(node);
-        }
-
-        public Set<QueryModelNode> getExtTup() {
-            return eSet;
-        }
-
-    }
-	
-	
-	
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/c12f58f4/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesIntegrationTests.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesIntegrationTests.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesIntegrationTests.java
new file mode 100644
index 0000000..b8b6a57
--- /dev/null
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesIntegrationTests.java
@@ -0,0 +1,413 @@
+package mvm.rya.indexing.external.tupleSet;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.resolver.RyaTypeResolverException;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.indexing.external.tupleSet.PcjTables.PcjException;
+import mvm.rya.indexing.external.tupleSet.PcjTables.PcjMetadata;
+import mvm.rya.indexing.external.tupleSet.PcjTables.PcjTableNameFactory;
+import mvm.rya.indexing.external.tupleSet.PcjTables.PcjVarOrderFactory;
+import mvm.rya.indexing.external.tupleSet.PcjTables.ShiftVarOrderFactory;
+import mvm.rya.indexing.external.tupleSet.PcjTables.VariableOrder;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+import mvm.rya.rdftriplestore.RyaSailRepository;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.security.tokens.PasswordToken;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.minicluster.MiniAccumuloCluster;
+import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.Statement;
+import org.openrdf.model.impl.LiteralImpl;
+import org.openrdf.model.impl.NumericLiteralImpl;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.model.vocabulary.XMLSchema;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.impl.MapBindingSet;
+import org.openrdf.repository.RepositoryConnection;
+import org.openrdf.repository.RepositoryException;
+
+import com.google.common.base.Optional;
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+import com.google.common.collect.Sets;
+import com.google.common.io.Files;
+
+/**
+ * Performs integration tests using {@link MiniAccumuloCluster} to ensure the
+ * functions of {@link PcjTables} work within a cluster setting.
+ */
+public class PcjTablesIntegrationTests {
+    private static final Logger log = Logger.getLogger(PcjTablesIntegrationTests.class);
+
+    protected static final String RYA_TABLE_PREFIX = "demo_";
+
+    // Rya data store and connections.
+    protected MiniAccumuloCluster accumulo = null;
+    protected static Connector accumuloConn = null;
+    protected RyaSailRepository ryaRepo = null;
+    protected RepositoryConnection ryaConn = null;
+
+    @Before
+    public void setupMiniResources() throws IOException, InterruptedException, AccumuloException, AccumuloSecurityException, RepositoryException {
+        // Initialize the Mini Accumulo that will be used to store Triples and get a connection to it.
+        accumulo = startMiniAccumulo();
+
+        // Set up the Rya library to use the Mini Accumulo.
+        ryaRepo = setupRya(accumulo);
+        ryaConn = ryaRepo.getConnection();
+    }
+
+    /**
+     * Ensure that when a new PCJ table is created, it is initialized with the
+     * correct metadata values.
+     * <p>
+     * The method being tested is {@link PcjTables#createPcjTable(Connector, String, Set, String)}
+     */
+    @Test
+    public void createPcjTable() throws PcjException {
+        final String sparql =
+                "SELECT ?name ?age " +
+                "{" +
+                  "FILTER(?age < 30) ." +
+                  "?name <http://hasAge> ?age." +
+                  "?name <http://playsSport> \"Soccer\" " +
+                "}";
+
+        // Create a PCJ table in the Mini Accumulo.
+        final String pcjTableName = new PcjTableNameFactory().makeTableName(RYA_TABLE_PREFIX, "testPcj");
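+        // ShiftVarOrderFactory generates every cyclic shift of the supplied order,
+        // so "name;age" is expected to yield both the "name;age" and "age;name" orders.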
+        Set<VariableOrder> varOrders = new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("name;age"));
+        PcjTables pcjs = new PcjTables();
+        pcjs.createPcjTable(accumuloConn, pcjTableName, varOrders, sparql);
+
+        // Fetch the PcjMetadata and ensure it has the correct values.
+        final PcjMetadata pcjMetadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
+
+        // Ensure the metadata matches the expected value.
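+        // A freshly created PCJ table holds no results yet, so its cardinality should be 0.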
+        final PcjMetadata expected = new PcjMetadata(sparql, 0L, varOrders);
+        assertEquals(expected, pcjMetadata);
+    }
+
+    /**
+ * Ensure that results which have been written to the PCJ table are stored in Accumulo.
+     * <p>
+     * The method being tested is {@link PcjTables#addResults(Connector, String, java.util.Collection)}
+     */
+    @Test
+    public void addResults() throws PcjException, TableNotFoundException, RyaTypeResolverException {
+        final String sparql =
+                "SELECT ?name ?age " +
+                "{" +
+                  "FILTER(?age < 30) ." +
+                  "?name <http://hasAge> ?age." +
+                  "?name <http://playsSport> \"Soccer\" " +
+                "}";
+
+        // Create a PCJ table in the Mini Accumulo.
+        final String pcjTableName = new PcjTableNameFactory().makeTableName(RYA_TABLE_PREFIX, "testPcj");
+        Set<VariableOrder> varOrders = new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("name;age"));
+        PcjTables pcjs = new PcjTables();
+        pcjs.createPcjTable(accumuloConn, pcjTableName, varOrders, sparql);
+
+        // Add a few results to the PCJ table.
+        MapBindingSet alice = new MapBindingSet();
+        alice.addBinding("name", new URIImpl("http://Alice"));
+        alice.addBinding("age", new NumericLiteralImpl(14, XMLSchema.INTEGER));
+
+        MapBindingSet bob = new MapBindingSet();
+        bob.addBinding("name", new URIImpl("http://Bob"));
+        bob.addBinding("age", new NumericLiteralImpl(16, XMLSchema.INTEGER));
+
+        MapBindingSet charlie = new MapBindingSet();
+        charlie.addBinding("name", new URIImpl("http://Charlie"));
+        charlie.addBinding("age", new NumericLiteralImpl(12, XMLSchema.INTEGER));
+
+        Set<BindingSet> results = Sets.<BindingSet>newHashSet(alice, bob, charlie);
+        pcjs.addResults(accumuloConn, pcjTableName, results);
+
+        // Make sure the cardinality was updated.
+        PcjMetadata metadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
+        assertEquals(3, metadata.getCardinality());
+
+        // Scan Accumulo for the stored results.
+        Multimap<String, BindingSet> fetchedResults = loadPcjResults(accumuloConn, pcjTableName);
+
+        // Ensure the expected results match those that were stored.
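+        // Each result should be stored once per variable order, so the same binding
+        // sets are expected under both the "name;age" and "age;name" orders.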
+        Multimap<String, BindingSet> expectedResults = HashMultimap.create();
+        expectedResults.putAll("name;age", results);
+        expectedResults.putAll("age;name", results);
+        assertEquals(expectedResults, fetchedResults);
+    }
+
+    /**
+ * Ensure that when results are already stored in Rya, we are able to populate
+ * the PCJ table for a new SPARQL query using those results.
+     * <p>
+     * The method being tested is: {@link PcjTables#populatePcj(Connector, String, RepositoryConnection, String)}
+     */
+    @Test
+    public void populatePcj() throws RepositoryException, PcjException, TableNotFoundException, RyaTypeResolverException {
+        // Load some Triples into Rya.
+        Set<Statement> triples = new HashSet<>();
+        triples.add( new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://hasAge"), new NumericLiteralImpl(14, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://hasAge"), new NumericLiteralImpl(16, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://hasAge"), new NumericLiteralImpl(12, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://hasAge"), new NumericLiteralImpl(43, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+
+        for(Statement triple : triples) {
+            ryaConn.add(triple);
+        }
+
+        // Create a PCJ table that will include those triples in its results.
+        final String sparql =
+                "SELECT ?name ?age " +
+                "{" +
+                  "FILTER(?age < 30) ." +
+                  "?name <http://hasAge> ?age." +
+                  "?name <http://playsSport> \"Soccer\" " +
+                "}";
+
+        final String pcjTableName = new PcjTableNameFactory().makeTableName(RYA_TABLE_PREFIX, "testPcj");
+        Set<VariableOrder> varOrders = new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("name;age"));
+        PcjTables pcjs = new PcjTables();
+        pcjs.createPcjTable(accumuloConn, pcjTableName, varOrders, sparql);
+
+        // Populate the PCJ table using a Rya connection.
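+        // populatePcj should evaluate the PCJ's SPARQL query against Rya and write the matching binding sets to the table.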
+        pcjs.populatePcj(accumuloConn, pcjTableName, ryaConn);
+
+        // Scan Accumulo for the stored results.
+        Multimap<String, BindingSet> fetchedResults = loadPcjResults(accumuloConn, pcjTableName);
+
+        // Make sure the cardinality was updated.
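+        // Eve's age (43) fails the FILTER(?age < 30), so only Alice, Bob, and Charlie match.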
+        PcjMetadata metadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
+        assertEquals(3, metadata.getCardinality());
+
+        // Ensure the expected results match those that were stored.
+        MapBindingSet alice = new MapBindingSet();
+        alice.addBinding("name", new URIImpl("http://Alice"));
+        alice.addBinding("age", new NumericLiteralImpl(14, XMLSchema.INTEGER));
+
+        MapBindingSet bob = new MapBindingSet();
+        bob.addBinding("name", new URIImpl("http://Bob"));
+        bob.addBinding("age", new NumericLiteralImpl(16, XMLSchema.INTEGER));
+
+        MapBindingSet charlie = new MapBindingSet();
+        charlie.addBinding("name", new URIImpl("http://Charlie"));
+        charlie.addBinding("age", new NumericLiteralImpl(12, XMLSchema.INTEGER));
+
+        Set<BindingSet> results = Sets.<BindingSet>newHashSet(alice, bob, charlie);
+
+        Multimap<String, BindingSet> expectedResults = HashMultimap.create();
+        expectedResults.putAll("name;age", results);
+        expectedResults.putAll("age;name", results);
+        assertEquals(expectedResults, fetchedResults);
+    }
+
+    /**
+ * Ensure that the method which creates a new PCJ table, scans Rya for matches,
+ * and stores them in the PCJ table works as expected.
+     * <p>
+     * The method being tested is: {@link PcjTables#createAndPopulatePcj(RepositoryConnection, Connector, String, String, String[], Optional)}
+     */
+    @Test
+    public void createAndPopulatePcj() throws RepositoryException, PcjException, TableNotFoundException, RyaTypeResolverException {
+        // Load some Triples into Rya.
+        Set<Statement> triples = new HashSet<>();
+        triples.add( new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://hasAge"), new NumericLiteralImpl(14, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Alice"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://hasAge"), new NumericLiteralImpl(16, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Bob"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://hasAge"), new NumericLiteralImpl(12, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Charlie"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+        triples.add( new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://hasAge"), new NumericLiteralImpl(43, XMLSchema.INTEGER)) );
+        triples.add( new StatementImpl(new URIImpl("http://Eve"), new URIImpl("http://playsSport"), new LiteralImpl("Soccer")) );
+
+        for(Statement triple : triples) {
+            ryaConn.add(triple);
+        }
+
+        // Create a PCJ table that will include those triples in its results.
+        final String sparql =
+                "SELECT ?name ?age " +
+                "{" +
+                  "FILTER(?age < 30) ." +
+                  "?name <http://hasAge> ?age." +
+                  "?name <http://playsSport> \"Soccer\" " +
+                "}";
+
+        final String pcjTableName = new PcjTableNameFactory().makeTableName(RYA_TABLE_PREFIX, "testPcj");
+
+        // Create and populate the PCJ table.
+        PcjTables pcjs = new PcjTables();
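+        // No PcjVarOrderFactory is supplied, so the method falls back to a default factory;
+        // the assertions below expect the shifted "name;age" and "age;name" orders.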
+        pcjs.createAndPopulatePcj(ryaConn, accumuloConn, pcjTableName, sparql, new String[]{"name", "age"}, Optional.<PcjVarOrderFactory>absent());
+
+        // Make sure the cardinality was updated.
+        PcjMetadata metadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
+        assertEquals(3, metadata.getCardinality());
+
+        // Scan Accumulo for the stored results.
+        Multimap<String, BindingSet> fetchedResults = loadPcjResults(accumuloConn, pcjTableName);
+
+        // Ensure the expected results match those that were stored.
+        MapBindingSet alice = new MapBindingSet();
+        alice.addBinding("name", new URIImpl("http://Alice"));
+        alice.addBinding("age", new NumericLiteralImpl(14, XMLSchema.INTEGER));
+
+        MapBindingSet bob = new MapBindingSet();
+        bob.addBinding("name", new URIImpl("http://Bob"));
+        bob.addBinding("age", new NumericLiteralImpl(16, XMLSchema.INTEGER));
+
+        MapBindingSet charlie = new MapBindingSet();
+        charlie.addBinding("name", new URIImpl("http://Charlie"));
+        charlie.addBinding("age", new NumericLiteralImpl(12, XMLSchema.INTEGER));
+
+        Set<BindingSet> results = Sets.<BindingSet>newHashSet(alice, bob, charlie);
+
+        Multimap<String, BindingSet> expectedResults = HashMultimap.create();
+        expectedResults.putAll("name;age", results);
+        expectedResults.putAll("age;name", results);
+
+        assertEquals(expectedResults, fetchedResults);
+    }
+
+    /**
+ * Scans Accumulo for the results that are stored in a PCJ table. The returned
+ * multimap maps each variable order found in the PCJ metadata to the set of
+ * deserialized binding sets that were stored in the table under that order.
+     */
+    private static Multimap<String, BindingSet> loadPcjResults(Connector accumuloConn, String pcjTableName) throws PcjException, TableNotFoundException, RyaTypeResolverException {
+        Multimap<String, BindingSet> fetchedResults = HashMultimap.create();
+
+        // Get the variable orders the data was written to.
+        PcjTables pcjs = new PcjTables();
+        PcjMetadata pcjMetadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
+
+        // Scan Accumulo for the stored results.
+        for(VariableOrder varOrder : pcjMetadata.getVarOrders()) {
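+            // Results for a variable order are stored under a column family named after that
+            // order, and the row ID is the binding set serialized in that variable order.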
+            Scanner scanner = accumuloConn.createScanner(pcjTableName, new Authorizations());
+            scanner.fetchColumnFamily( new Text(varOrder.toString()) );
+
+            for(Entry<Key, Value> entry : scanner) {
+                byte[] serializedResult = entry.getKey().getRow().getBytes();
+                BindingSet result = AccumuloPcjSerializer.deSerialize(serializedResult, varOrder.toArray());
+                fetchedResults.put(varOrder.toString(), result);
+            }
+        }
+
+        return fetchedResults;
+    }
+
+    @After
+    public void shutdownMiniResources() {
+        if(ryaConn != null) {
+            try {
+                log.info("Shutting down Rya Connection.");
+                ryaConn.close();
+                log.info("Rya Connection shut down.");
+            } catch(final Exception e) {
+                log.error("Could not shut down the Rya Connection.", e);
+            }
+        }
+
+        if(ryaRepo != null) {
+            try {
+                log.info("Shutting down Rya Repo.");
+                ryaRepo.shutDown();
+                log.info("Rya Repo shut down.");
+            } catch(final Exception e) {
+                log.error("Could not shut down the Rya Repo.", e);
+            }
+        }
+
+        if(accumulo != null) {
+            try {
+                log.info("Shutting down the Mini Accumulo being used as a Rya store.");
+                accumulo.stop();
+                log.info("Mini Accumulo being used as a Rya store shut down.");
+            } catch(final Exception e) {
+                log.error("Could not shut down the Mini Accumulo.", e);
+            }
+        }
+    }
+
+    /**
+     * Sets up a Mini Accumulo cluster that uses a temporary directory to store its data.
+     *
+     * @return A Mini Accumulo cluster.
+     */
+    private static MiniAccumuloCluster startMiniAccumulo() throws IOException, InterruptedException, AccumuloException, AccumuloSecurityException {
+        final File miniDataDir = Files.createTempDir();
+
+        // Set up and start the Mini Accumulo.
+        final MiniAccumuloCluster accumulo = new MiniAccumuloCluster(miniDataDir, "password");
+        accumulo.start();
+
+        // Store a connector to the Mini Accumulo.
+        final Instance instance = new ZooKeeperInstance(accumulo.getInstanceName(), accumulo.getZooKeepers());
+        accumuloConn = instance.getConnector("root", new PasswordToken("password"));
+
+        return accumulo;
+    }
+
+    /**
+     * Configures a Mini Accumulo cluster to be used as a Rya repository.
+     *
+     * @param accumulo - The Mini Accumulo cluster Rya will sit on top of. (not null)
+     * @return The Rya repository sitting on top of the Mini Accumulo.
+     */
+    private static RyaSailRepository setupRya(final MiniAccumuloCluster accumulo) throws AccumuloException, AccumuloSecurityException, RepositoryException {
+        checkNotNull(accumulo);
+
+        // Set up the Rya Repository that will be used to create Repository Connections.
+        final RdfCloudTripleStore ryaStore = new RdfCloudTripleStore();
+        final AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
+        crdfdao.setConnector(accumuloConn);
+
+        // Set up Rya configuration values.
+        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+        conf.setTablePrefix("demo_");
+        conf.setDisplayQueryPlan(true);
+
+        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, RYA_TABLE_PREFIX);
+        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
+        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "password");
+        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, accumulo.getInstanceName());
+
+        crdfdao.setConf(conf);
+        ryaStore.setRyaDAO(crdfdao);
+
+        final RyaSailRepository ryaRepo = new RyaSailRepository(ryaStore);
+        ryaRepo.initialize();
+
+        return ryaRepo;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/c12f58f4/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesTests.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesTests.java b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesTests.java
new file mode 100644
index 0000000..0a8ebc8
--- /dev/null
+++ b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/PcjTablesTests.java
@@ -0,0 +1,65 @@
+package mvm.rya.indexing.external.tupleSet;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.Set;
+
+import org.junit.Test;
+
+import com.google.common.collect.Sets;
+
+import mvm.rya.indexing.external.tupleSet.PcjTables.PcjMetadata;
+import mvm.rya.indexing.external.tupleSet.PcjTables.ShiftVarOrderFactory;
+import mvm.rya.indexing.external.tupleSet.PcjTables.VariableOrder;
+
+/**
+ * Tests the classes and methods of {@link PcjTables}.
+ */
+public class PcjTablesTests {
+
+    @Test
+    public void variableOrder_hashCode() {
+        assertEquals(new VariableOrder("a", "b", "C").hashCode(), new VariableOrder("a", "b", "C").hashCode());
+    }
+
+    @Test
+    public void variableOrder_equals() {
+        assertEquals(new VariableOrder("a", "b", "C"), new VariableOrder("a", "b", "C"));
+    }
+
+    @Test
+    public void variableOrder_fromString() {
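+        // The String constructor parses a semicolon-delimited list of variable names.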
+        assertEquals(new VariableOrder("a", "b", "c"), new VariableOrder("a;b;c"));
+    }
+
+    @Test
+    public void variableOrder_toString() {
+        assertEquals("a;b;c", new VariableOrder("a", "b", "c").toString());
+    }
+
+    @Test
+    public void pcjMetadata_hashCode() {
+        PcjMetadata meta1 = new PcjMetadata("A SPARQL string.", 5, Sets.newHashSet(new VariableOrder("a", "b", "c"), new VariableOrder("d", "e", "f")));
+        PcjMetadata meta2 = new PcjMetadata("A SPARQL string.", 5, Sets.newHashSet(new VariableOrder("a", "b", "c"), new VariableOrder("d", "e", "f")));
+        assertEquals(meta1.hashCode(), meta2.hashCode());
+    }
+
+    @Test
+    public void pcjMetadata_equals() {
+        PcjMetadata meta1 = new PcjMetadata("A SPARQL string.", 5, Sets.newHashSet(new VariableOrder("a", "b", "c"), new VariableOrder("d", "e", "f")));
+        PcjMetadata meta2 = new PcjMetadata("A SPARQL string.", 5, Sets.newHashSet(new VariableOrder("a", "b", "c"), new VariableOrder("d", "e", "f")));
+        assertEquals(meta1, meta2);
+    }
+
+    @Test
+    public void shiftVarOrdersFactory() {
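+        // The factory should produce every cyclic shift of the original variable order.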
+        Set<VariableOrder> expected = Sets.newHashSet(
+                new VariableOrder("a;b;c"),
+                new VariableOrder("b;c;a"),
+                new VariableOrder("c;a;b"));
+
+        Set<VariableOrder> varOrders = new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("a;b;c"));
+        assertEquals(expected, varOrders);
+    }
+
+}
\ No newline at end of file