Posted to commits@rya.apache.org by ca...@apache.org on 2017/10/12 18:43:22 UTC

[01/11] incubator-rya git commit: RYA-402 Create Kafka reusable test code project. Closes #242.

Repository: incubator-rya
Updated Branches:
  refs/heads/master 6dd81bd8e -> 29a8e6b75


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/pom.xml
----------------------------------------------------------------------
diff --git a/test/pom.xml b/test/pom.xml
new file mode 100644
index 0000000..d458f8f
--- /dev/null
+++ b/test/pom.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya-project</artifactId>
+        <version>3.2.12-incubating-SNAPSHOT</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>rya.test.parent</artifactId>
+
+    <name>Apache Rya Test Parent</name>
+    <description>The parent pom file for any rya.test project.</description>
+
+    <packaging>pom</packaging>
+
+    <modules>
+        <module>kafka</module>
+    </modules>
+</project>


[05/11] incubator-rya git commit: RYA-401 Fixed all default charset bugs. Closes #243.

Posted by ca...@apache.org.
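
The recurring bug this commit fixes is implicit use of the JVM's default charset: String.getBytes() and new String(byte[]) without a charset argument produce platform-dependent results. A minimal illustration of the pitfall and the fix (not from the Rya codebase):

    import java.nio.charset.StandardCharsets;

    public class CharsetDemo {
        public static void main(String[] args) {
            String s = "café";
            // Platform-dependent: length varies with the JVM's default charset
            // (5 bytes under UTF-8, 4 under windows-1252).
            byte[] platformBytes = s.getBytes();
            // Deterministic: always 5 bytes, on every JVM.
            byte[] utf8Bytes = s.getBytes(StandardCharsets.UTF_8);
            System.out.println(platformBytes.length + " vs " + utf8Bytes.length);
        }
    }

Bytes written under one default charset and read back under another silently corrupt non-ASCII text, which is why the conversions in the diffs below are pinned to StandardCharsets.UTF_8.
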
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/StarQuery.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/StarQuery.java b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/StarQuery.java
index 3700988..dedf85d 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/StarQuery.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/StarQuery.java
@@ -1,5 +1,7 @@
 package org.apache.rya.indexing.accumulo.entity;
 
+import java.nio.charset.StandardCharsets;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -25,6 +27,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.hadoop.io.Text;
 import org.apache.rya.accumulo.documentIndex.TextColumn;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.domain.RyaURI;
@@ -32,9 +36,6 @@ import org.apache.rya.api.resolver.RdfToRyaConversions;
 import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 import org.apache.rya.joinselect.AccumuloSelectivityEvalDAO;
-
-import org.apache.accumulo.core.client.TableNotFoundException;
-import org.apache.hadoop.io.Text;
 import org.openrdf.model.Value;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.algebra.StatementPattern;
@@ -47,23 +48,23 @@ import com.google.common.primitives.Bytes;
 
 public class StarQuery {
 
-    private List<StatementPattern> nodes;
-    private TextColumn[] nodeColumnCond;
+    private final List<StatementPattern> nodes;
+    private final TextColumn[] nodeColumnCond;
     private String commonVarName;
     private Var commonVar;
     private Var context;
     private String contextURI ="";
-    private Map<String,Integer> varPos = Maps.newHashMap();
+    private final Map<String,Integer> varPos = Maps.newHashMap();
     private boolean isCommonVarURI = false;
 
 
-    public StarQuery(List<StatementPattern> nodes) {
+    public StarQuery(final List<StatementPattern> nodes) {
         this.nodes = nodes;
         if(nodes.size() == 0) {
             throw new IllegalArgumentException("Nodes cannot be empty!");
         }
         nodeColumnCond = new TextColumn[nodes.size()];
-        Var tempContext = nodes.get(0).getContextVar();
+        final Var tempContext = nodes.get(0).getContextVar();
         if(tempContext != null) {
             context = tempContext.clone();
         } else {
@@ -71,13 +72,13 @@ public class StarQuery {
         }
         try {
             this.init();
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             e.printStackTrace();
         }
     }
 
 
-    public StarQuery(Set<StatementPattern> nodes) {
+    public StarQuery(final Set<StatementPattern> nodes) {
         this(Lists.newArrayList(nodes));
     }
 
@@ -85,7 +86,7 @@ public class StarQuery {
         return nodes.size();
     }
 
-    public StarQuery(StarQuery other) {
+    public StarQuery(final StarQuery other) {
        this(other.nodes);
     }
 
@@ -153,7 +154,7 @@ public class StarQuery {
 
         Set<String> bindingNames = Sets.newHashSet();
 
-        for(StatementPattern sp: nodes) {
+        for(final StatementPattern sp: nodes) {
 
             if(bindingNames.size() == 0) {
                 bindingNames = sp.getBindingNames();
@@ -174,7 +175,7 @@ public class StarQuery {
 
         Set<String> bindingNames = Sets.newHashSet();
 
-        for(StatementPattern sp: nodes) {
+        for(final StatementPattern sp: nodes) {
 
             if(bindingNames.size() == 0) {
                 bindingNames = sp.getAssuredBindingNames();
@@ -194,13 +195,13 @@ public class StarQuery {
 
 
 
-    public CardinalityStatementPattern getMinCardSp(AccumuloSelectivityEvalDAO ase) {
+    public CardinalityStatementPattern getMinCardSp(final AccumuloSelectivityEvalDAO ase) {
 
         StatementPattern minSp = null;
         double cardinality = Double.MAX_VALUE;
         double tempCard = -1;
 
-        for (StatementPattern sp : nodes) {
+        for (final StatementPattern sp : nodes) {
 
             try {
                 tempCard = ase.getCardinality(ase.getConf(), sp);
@@ -209,7 +210,7 @@ public class StarQuery {
                     cardinality = tempCard;
                     minSp = sp;
                 }
-            } catch (TableNotFoundException e) {
+            } catch (final TableNotFoundException e) {
                 e.printStackTrace();
             }
 
@@ -223,10 +224,10 @@ public class StarQuery {
 
     public class CardinalityStatementPattern {
 
-        private StatementPattern sp;
-        private double cardinality;
+        private final StatementPattern sp;
+        private final double cardinality;
 
-        public CardinalityStatementPattern(StatementPattern sp, double cardinality) {
+        public CardinalityStatementPattern(final StatementPattern sp, final double cardinality) {
             this.sp = sp;
             this.cardinality = cardinality;
         }
@@ -242,7 +243,7 @@ public class StarQuery {
     }
 
 
-   public double getCardinality( AccumuloSelectivityEvalDAO ase) {
+   public double getCardinality( final AccumuloSelectivityEvalDAO ase) {
 
         double cardinality = Double.MAX_VALUE;
         double tempCard = -1;
@@ -263,7 +264,7 @@ public class StarQuery {
                 }
             }
 
-        } catch (Exception e) {
+        } catch (final Exception e) {
             e.printStackTrace();
         }
 
@@ -275,15 +276,15 @@ public class StarQuery {
 
 
 
-    public static Set<String> getCommonVars(StarQuery query, BindingSet bs) {
+    public static Set<String> getCommonVars(final StarQuery query, final BindingSet bs) {
 
-        Set<String> starQueryVarNames = Sets.newHashSet();
+        final Set<String> starQueryVarNames = Sets.newHashSet();
 
         if(bs == null || bs.size() == 0) {
             return Sets.newHashSet();
         }
 
-        Set<String> bindingNames = bs.getBindingNames();
+        final Set<String> bindingNames = bs.getBindingNames();
         starQueryVarNames.addAll(query.getUnCommonVars());
         if(!query.commonVarConstant()) {
             starQueryVarNames.add(query.getCommonVarName());
@@ -299,30 +300,30 @@ public class StarQuery {
 
 
 
-    public static StarQuery getConstrainedStarQuery(StarQuery query, BindingSet bs) {
+    public static StarQuery getConstrainedStarQuery(final StarQuery query, final BindingSet bs) {
 
         if(bs.size() == 0) {
             return query;
         }
 
-        Set<String> bindingNames = bs.getBindingNames();
-        Set<String> unCommonVarNames = query.getUnCommonVars();
-        Set<String> intersectVar = Sets.intersection(bindingNames, unCommonVarNames);
+        final Set<String> bindingNames = bs.getBindingNames();
+        final Set<String> unCommonVarNames = query.getUnCommonVars();
+        final Set<String> intersectVar = Sets.intersection(bindingNames, unCommonVarNames);
 
 
         if (!query.commonVarConstant()) {
 
-            Value v = bs.getValue(query.getCommonVarName());
+            final Value v = bs.getValue(query.getCommonVarName());
 
             if (v != null) {
                 query.commonVar.setValue(v);
             }
         }
 
-        for(String s: intersectVar) {
+        for(final String s: intersectVar) {
             try {
                 query.nodeColumnCond[query.varPos.get(s)] = query.setValue(query.nodeColumnCond[query.varPos.get(s)], bs.getValue(s));
-            } catch (RyaTypeResolverException e) {
+            } catch (final RyaTypeResolverException e) {
                 e.printStackTrace();
             }
         }
@@ -331,20 +332,20 @@ public class StarQuery {
     }
 
 
-    private TextColumn setValue(TextColumn tc, Value v) throws RyaTypeResolverException {
+    private TextColumn setValue(final TextColumn tc, final Value v) throws RyaTypeResolverException {
 
-        String cq = tc.getColumnQualifier().toString();
-        String[] cqArray = cq.split("\u0000");
+        final String cq = tc.getColumnQualifier().toString();
+        final String[] cqArray = cq.split("\u0000");
 
         if (cqArray[0].equals("subject")) {
             // RyaURI subjURI = (RyaURI) RdfToRyaConversions.convertValue(v);
             tc.setColumnQualifier(new Text("subject" + "\u0000" + v.stringValue()));
             tc.setIsPrefix(false);
         } else if (cqArray[0].equals("object")) {
-            RyaType objType = RdfToRyaConversions.convertValue(v);
-            byte[][] b1 = RyaContext.getInstance().serializeType(objType);
-            byte[] b2 = Bytes.concat("object".getBytes(),
-                    "\u0000".getBytes(), b1[0], b1[1]);
+            final RyaType objType = RdfToRyaConversions.convertValue(v);
+            final byte[][] b1 = RyaContext.getInstance().serializeType(objType);
+            final byte[] b2 = Bytes.concat("object".getBytes(StandardCharsets.UTF_8),
+                    "\u0000".getBytes(StandardCharsets.UTF_8), b1[0], b1[1]);
             tc.setColumnQualifier(new Text(b2));
             tc.setIsPrefix(false);
         } else {
@@ -359,15 +360,15 @@ public class StarQuery {
 
     //assumes nodes form a valid star query with only one common variable
     //assumes nodes and commonVar have been set
-    private TextColumn nodeToTextColumn(StatementPattern node, int i) throws RyaTypeResolverException {
+    private TextColumn nodeToTextColumn(final StatementPattern node, final int i) throws RyaTypeResolverException {
 
-        RyaContext rc = RyaContext.getInstance();
+        final RyaContext rc = RyaContext.getInstance();
 
-        Var subjVar = node.getSubjectVar();
-        Var predVar = node.getPredicateVar();
-        Var objVar = node.getObjectVar();
+        final Var subjVar = node.getSubjectVar();
+        final Var predVar = node.getPredicateVar();
+        final Var objVar = node.getObjectVar();
 
-        RyaURI predURI = (RyaURI) RdfToRyaConversions.convertValue(node.getPredicateVar().getValue());
+        final RyaURI predURI = (RyaURI) RdfToRyaConversions.convertValue(node.getPredicateVar().getValue());
 
 
         //assumes StatementPattern contains at least one variable
@@ -388,10 +389,10 @@ public class StarQuery {
             } else {
 
                 isCommonVarURI = true;
-                RyaType objType = RdfToRyaConversions.convertValue(objVar.getValue());
-                byte[][] b1 = rc.serializeType(objType);
+                final RyaType objType = RdfToRyaConversions.convertValue(objVar.getValue());
+                final byte[][] b1 = rc.serializeType(objType);
 
-                byte[] b2 = Bytes.concat("object".getBytes(), "\u0000".getBytes(), b1[0], b1[1]);
+                final byte[] b2 = Bytes.concat("object".getBytes(StandardCharsets.UTF_8), "\u0000".getBytes(StandardCharsets.UTF_8), b1[0], b1[1]);
                 return new TextColumn(new Text(predURI.getData()), new Text(b2));
             }
 
@@ -401,7 +402,7 @@ public class StarQuery {
                 isCommonVarURI = true;
                 varPos.put(objVar.getName(), i);
 
-                TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("object"));
+                final TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("object"));
                 tc.setIsPrefix(true);
                 return tc;
 
@@ -409,7 +410,7 @@ public class StarQuery {
 
                 varPos.put(subjVar.getName(), i);
 
-                TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("subject"));
+                final TextColumn tc = new TextColumn(new Text(predURI.getData()), new Text("subject"));
                 tc.setIsPrefix(true);
                 return tc;
 
@@ -437,7 +438,7 @@ public class StarQuery {
         }
 
         if(hasContext()) {
-            RyaURI ctxtURI = (RyaURI) RdfToRyaConversions.convertValue(context.getValue());
+            final RyaURI ctxtURI = (RyaURI) RdfToRyaConversions.convertValue(context.getValue());
             contextURI = ctxtURI.getData();
         }
 
@@ -456,14 +457,14 @@ public class StarQuery {
 
     // called after nodes set
     // assumes nodes form a valid query with a single, common variable
-    private Var getCommonVar(List<StatementPattern> nodes) {
+    private Var getCommonVar(final List<StatementPattern> nodes) {
 
         Set<Var> vars = null;
-        List<Var> tempVar;
+        final List<Var> tempVar;
         Set<Var> tempSet;
 
         int i = 0;
-        for (StatementPattern sp : nodes) {
+        for (final StatementPattern sp : nodes) {
 
             if (vars == null) {
                 vars = Sets.newHashSet();
@@ -485,7 +486,7 @@ public class StarQuery {
 
             i = 0;
 
-            for (Var v : vars) {
+            for (final Var v : vars) {
                 i++;
 
                 if (i == 1) {
@@ -507,7 +508,7 @@ public class StarQuery {
 
 
     //assumes bindings is not of size 0
-    private static boolean isBindingsetValid(Set<String> bindings) {
+    private static boolean isBindingsetValid(final Set<String> bindings) {
 
         int varCount = 0;
 
@@ -516,7 +517,7 @@ public class StarQuery {
         } else {
 
 
-            for (String s : bindings) {
+            for (final String s : bindings) {
                 if (!s.startsWith("-const-")) {
                     varCount++;
                 }
@@ -535,7 +536,7 @@ public class StarQuery {
 
 
 
-    public static boolean isValidStarQuery(Collection<StatementPattern> nodes) {
+    public static boolean isValidStarQuery(final Collection<StatementPattern> nodes) {
 
         Set<String> bindings = null;
         boolean contextSet = false;
@@ -545,10 +546,10 @@ public class StarQuery {
             return false;
         }
 
-        for(StatementPattern sp: nodes) {
+        for(final StatementPattern sp: nodes) {
 
-            Var tempContext = sp.getContextVar();
-            Var predVar = sp.getPredicateVar();
+            final Var tempContext = sp.getContextVar();
+            final Var predVar = sp.getPredicateVar();
 
             //does not support variable context
             if(tempContext != null && !tempContext.isConstant()) {
@@ -617,7 +618,7 @@ public class StarQuery {
 
         String s = "Term conditions: " + "\n";
 
-        for (TextColumn element : this.nodeColumnCond) {
+        for (final TextColumn element : this.nodeColumnCond) {
             s = s + element.toString() + "\n";
         }
 

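For reference, the setValue and nodeToTextColumn changes above build an "object\u0000<serialized value>" column qualifier with Bytes.concat, now encoding the prefix and the NUL delimiter as UTF-8. A standalone sketch of that byte-level composition, where the two trailing byte arrays are hypothetical placeholders for what RyaContext.serializeType returns:

    import java.nio.charset.StandardCharsets;

    import com.google.common.primitives.Bytes;

    public class QualifierSketch {
        // Mirrors the fixed pattern: prefix and NUL delimiter encoded as UTF-8,
        // then the pre-serialized value and type-marker bytes appended as-is.
        static byte[] objectQualifier(final byte[] valueBytes, final byte[] typeMarker) {
            return Bytes.concat("object".getBytes(StandardCharsets.UTF_8),
                    "\u0000".getBytes(StandardCharsets.UTF_8), valueBytes, typeMarker);
        }

        public static void main(final String[] args) {
            // Hypothetical stand-ins for serializeType output.
            final byte[] qualifier = objectQualifier(
                    "42".getBytes(StandardCharsets.UTF_8), new byte[] { 2 });
            System.out.println(qualifier.length); // "object" (6) + NUL (1) + 2 + 1 = 10
        }
    }
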
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/iterators/AndingIterator.java b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
index b63e0d2..f4c9436 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/iterators/AndingIterator.java
@@ -1,5 +1,3 @@
-package org.apache.rya.indexing.accumulo.freetext.iterators;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.indexing.accumulo.freetext.iterators;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,10 +16,10 @@ package org.apache.rya.indexing.accumulo.freetext.iterators;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.indexing.accumulo.freetext.iterators;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
@@ -49,27 +47,27 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 
 	protected Text nullText = new Text();
 
-	protected Text getPartition(Key key) {
+	protected Text getPartition(final Key key) {
 		return key.getRow();
 	}
 
-	protected Text getTerm(Key key) {
+	protected Text getTerm(final Key key) {
 		return key.getColumnFamily();
 	}
 
-	protected Text getDocID(Key key) {
+	protected Text getDocID(final Key key) {
 		return key.getColumnQualifier();
 	}
 
-	protected Key buildKey(Text partition, Text term) {
+	protected Key buildKey(final Text partition, final Text term) {
 		return new Key(partition, (term == null) ? nullText : term);
 	}
 
-	protected Key buildKey(Text partition, Text term, Text docID) {
+	protected Key buildKey(final Text partition, final Text term, final Text docID) {
 		return new Key(partition, (term == null) ? nullText : term, docID);
 	}
 
-	protected Key buildFollowingPartitionKey(Key key) {
+	protected Key buildFollowingPartitionKey(final Key key) {
 		return key.followingKey(PartialKey.ROW);
 	}
 
@@ -81,18 +79,18 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 		public Collection<ByteSequence> seekColfams;
 		public boolean notFlag;
 
-		public TermSource(TermSource other) {
+		public TermSource(final TermSource other) {
 			this.iter = other.iter;
 			this.term = other.term;
 			this.notFlag = other.notFlag;
 			this.seekColfams = other.seekColfams;
 		}
 
-		public TermSource(SortedKeyValueIterator<Key, Value> iter, Text term) {
+		public TermSource(final SortedKeyValueIterator<Key, Value> iter, final Text term) {
 			this(iter, term, false);
 		}
 
-		public TermSource(SortedKeyValueIterator<Key, Value> iter, Text term, boolean notFlag) {
+		public TermSource(final SortedKeyValueIterator<Key, Value> iter, final Text term, final boolean notFlag) {
 			this.iter = iter;
 			this.term = term;
 			this.notFlag = notFlag;
@@ -128,11 +126,11 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 	}
 
 	@Override
-	public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment env) {
+	public SortedKeyValueIterator<Key, Value> deepCopy(final IteratorEnvironment env) {
 		return new AndingIterator(this, env);
 	}
 
-	private AndingIterator(AndingIterator other, IteratorEnvironment env) {
+	private AndingIterator(final AndingIterator other, final IteratorEnvironment env) {
 		if (other.sources != null) {
 			sourcesCount = other.sourcesCount;
 			sources = new TermSource[sourcesCount];
@@ -159,7 +157,7 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 	}
 
 	// precondition: currentRow is not null
-	private boolean seekOneSource(int sourceID) throws IOException {
+	private boolean seekOneSource(final int sourceID) throws IOException {
 		// find the next key in the appropriate column family that is at or beyond the cursor (currentRow, currentCQ)
 		// advance the cursor if this source goes beyond it
 		// return whether we advanced the cursor
@@ -189,13 +187,13 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 						break;
 					}
 				}
-				int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
+				final int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
 				// check if this source is already at or beyond currentRow
 				// if not, then seek to at least the current row
 
 				if (partitionCompare > 0) {
 					// seek to at least the currentRow
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term);
+					final Key seekKey = buildKey(currentPartition, sources[sourceID].term);
 					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 					continue;
 				}
@@ -208,11 +206,11 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 				// now we must make sure we're in the right columnFamily in the current row
 				// Note: Iterators are auto-magically set to the correct columnFamily
 				if (sources[sourceID].term != null) {
-					int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
+					final int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
 					// check if this source is already on the right columnFamily
 					// if not, then seek forwards to the right columnFamily
 					if (termCompare > 0) {
-						Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
+						final Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
 						sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 						continue;
 					}
@@ -225,8 +223,8 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 
 				// we have verified that we are in currentRow and the correct column family
 				// make sure we are at or beyond columnQualifier
-				Text docID = getDocID(sources[sourceID].iter.getTopKey());
-				int docIDCompare = currentDocID.compareTo(docID);
+				final Text docID = getDocID(sources[sourceID].iter.getTopKey());
+				final int docIDCompare = currentDocID.compareTo(docID);
 				// If we are past the target, this is a valid result
 				if (docIDCompare < 0) {
 					break;
@@ -234,7 +232,7 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 				// if this source is not yet at the currentCQ then advance in this source
 				if (docIDCompare > 0) {
 					// seek forwards
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
+					final Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
 					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 					continue;
 				}
@@ -267,12 +265,12 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 						return true;
 					}
 				}
-				int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
+				final int partitionCompare = currentPartition.compareTo(getPartition(sources[sourceID].iter.getTopKey()));
 				// check if this source is already at or beyond currentRow
 				// if not, then seek to at least the current row
 				if (partitionCompare > 0) {
 					// seek to at least the currentRow
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term);
+					final Key seekKey = buildKey(currentPartition, sources[sourceID].term);
 					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 					continue;
 				}
@@ -289,11 +287,11 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 				// Note: Iterators are auto-magically set to the correct columnFamily
 
 				if (sources[sourceID].term != null) {
-					int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
+					final int termCompare = sources[sourceID].term.compareTo(getTerm(sources[sourceID].iter.getTopKey()));
 					// check if this source is already on the right columnFamily
 					// if not, then seek forwards to the right columnFamily
 					if (termCompare > 0) {
-						Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
+						final Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
 						sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 						continue;
 					}
@@ -313,15 +311,15 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 							// setting currentRow to null counts as advancing the cursor
 							return true;
 						}
-						Key seekKey = buildFollowingPartitionKey(sources[sourceID].iter.getTopKey());
+						final Key seekKey = buildFollowingPartitionKey(sources[sourceID].iter.getTopKey());
 						sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 						continue;
 					}
 				}
 				// we have verified that we are in currentRow and the correct column family
 				// make sure we are at or beyond columnQualifier
-				Text docID = getDocID(sources[sourceID].iter.getTopKey());
-				int docIDCompare = currentDocID.compareTo(docID);
+				final Text docID = getDocID(sources[sourceID].iter.getTopKey());
+				final int docIDCompare = currentDocID.compareTo(docID);
 				// if this source has advanced beyond the current column qualifier then advance currentCQ and return true
 				if (docIDCompare < 0) {
 					currentDocID.set(docID);
@@ -331,7 +329,7 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 				// if this source is not yet at the currentCQ then seek in this source
 				if (docIDCompare > 0) {
 					// seek forwards
-					Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
+					final Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
 					sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
 					continue;
 				}
@@ -372,9 +370,10 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 		topKey = buildKey(currentPartition, nullText, currentDocID);
 	}
 
-	public static String stringTopKey(SortedKeyValueIterator<Key, Value> iter) {
-		if (iter.hasTop())
-			return iter.getTopKey().toString();
+	public static String stringTopKey(final SortedKeyValueIterator<Key, Value> iter) {
+		if (iter.hasTop()) {
+            return iter.getTopKey().toString();
+        }
 		return "";
 	}
 
@@ -387,10 +386,11 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 	 * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use
 	 *             {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}.
 	 */
-	public static String encodeColumns(Text[] columns) {
-		StringBuilder sb = new StringBuilder();
-		for (int i = 0; i < columns.length; i++) {
-			sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i]))));
+	@Deprecated
+    public static String encodeColumns(final Text[] columns) {
+		final StringBuilder sb = new StringBuilder();
+		for (final Text column : columns) {
+			sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(column)), StandardCharsets.UTF_8));
 			sb.append('\n');
 		}
 		return sb.toString();
@@ -402,53 +402,58 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 	 * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use
 	 *             {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}.
 	 */
-	public static String encodeBooleans(boolean[] flags) {
-		byte[] bytes = new byte[flags.length];
+	@Deprecated
+    public static String encodeBooleans(final boolean[] flags) {
+		final byte[] bytes = new byte[flags.length];
 		for (int i = 0; i < flags.length; i++) {
-			if (flags[i])
-				bytes[i] = 1;
-			else
-				bytes[i] = 0;
+			if (flags[i]) {
+                bytes[i] = 1;
+            } else {
+                bytes[i] = 0;
+            }
 		}
-		return new String(Base64.encodeBase64(bytes));
+		return new String(Base64.encodeBase64(bytes), StandardCharsets.UTF_8);
 	}
 
-	protected static Text[] decodeColumns(String columns) {
-		String[] columnStrings = columns.split("\n");
-		Text[] columnTexts = new Text[columnStrings.length];
+	protected static Text[] decodeColumns(final String columns) {
+		final String[] columnStrings = columns.split("\n");
+		final Text[] columnTexts = new Text[columnStrings.length];
 		for (int i = 0; i < columnStrings.length; i++) {
-			columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes()));
+			columnTexts[i] = new Text(Base64.decodeBase64(columnStrings[i].getBytes(StandardCharsets.UTF_8)));
 		}
 		return columnTexts;
 	}
 
 	/**
 	 * to be made protected
-	 * 
+	 *
 	 * @param flags
 	 * @return decoded flags
 	 * @deprecated since 1.4. To be made protected. Do not interact with flags string directly, just use
 	 *             {@link #setColumnFamilies(IteratorSetting, Text[], boolean[])}.
 	 */
-	public static boolean[] decodeBooleans(String flags) {
+	@Deprecated
+    public static boolean[] decodeBooleans(final String flags) {
 		// return null if there were no flags
-		if (flags == null)
-			return null;
+		if (flags == null) {
+            return null;
+        }
 
-		byte[] bytes = Base64.decodeBase64(flags.getBytes());
-		boolean[] bFlags = new boolean[bytes.length];
+		final byte[] bytes = Base64.decodeBase64(flags.getBytes(StandardCharsets.UTF_8));
+		final boolean[] bFlags = new boolean[bytes.length];
 		for (int i = 0; i < bytes.length; i++) {
-			if (bytes[i] == 1)
-				bFlags[i] = true;
-			else
-				bFlags[i] = false;
+			if (bytes[i] == 1) {
+                bFlags[i] = true;
+            } else {
+                bFlags[i] = false;
+            }
 		}
 		return bFlags;
 	}
 
 	@Override
-	public void init(SortedKeyValueIterator<Key, Value> source, Map<String, String> options, IteratorEnvironment env) throws IOException {
-		Text[] terms = decodeColumns(options.get(columnFamiliesOptionName));
+	public void init(final SortedKeyValueIterator<Key, Value> source, final Map<String, String> options, final IteratorEnvironment env) throws IOException {
+		final Text[] terms = decodeColumns(options.get(columnFamiliesOptionName));
 		boolean[] notFlag = decodeBooleans(options.get(notFlagOptionName));
 
 		if (terms.length < 2) {
@@ -460,13 +465,14 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 		// And we are going to re-order such that the first term is not a ! term
 		if (notFlag == null) {
 			notFlag = new boolean[terms.length];
-			for (int i = 0; i < terms.length; i++)
-				notFlag[i] = false;
+			for (int i = 0; i < terms.length; i++) {
+                notFlag[i] = false;
+            }
 		}
 		if (notFlag[0]) {
 			for (int i = 1; i < notFlag.length; i++) {
 				if (notFlag[i] == false) {
-					Text swapFamily = new Text(terms[0]);
+					final Text swapFamily = new Text(terms[0]);
 					terms[0].set(terms[i]);
 					terms[i].set(swapFamily);
 					notFlag[0] = false;
@@ -488,7 +494,7 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 	}
 
 	@Override
-	public void seek(Range range, Collection<ByteSequence> seekColumnFamilies, boolean inclusive) throws IOException {
+	public void seek(final Range range, final Collection<ByteSequence> seekColumnFamilies, final boolean inclusive) throws IOException {
 		overallRange = new Range(range);
 		currentPartition = new Text();
 		currentDocID.set(emptyByteArray);
@@ -512,16 +518,16 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 		advanceToIntersection();
 	}
 
-	public void addSource(SortedKeyValueIterator<Key, Value> source, IteratorEnvironment env, Text term, boolean notFlag) {
+	public void addSource(final SortedKeyValueIterator<Key, Value> source, final IteratorEnvironment env, final Text term, final boolean notFlag) {
 		// Check if we have space for the added Source
 		if (sources == null) {
 			sources = new TermSource[1];
 		} else {
 			// allocate space for node, and copy current tree.
 			// TODO: Should we change this to an ArrayList so that we can just add() ?
-			TermSource[] localSources = new TermSource[sources.length + 1];
+			final TermSource[] localSources = new TermSource[sources.length + 1];
 			int currSource = 0;
-			for (TermSource myTerm : sources) {
+			for (final TermSource myTerm : sources) {
 				// TODO: Do I need to call new here? or can I just re-use the term?
 				localSources[currSource] = new TermSource(myTerm);
 				currSource++;
@@ -534,29 +540,32 @@ public class AndingIterator implements SortedKeyValueIterator<Key, Value> {
 
 	/**
 	 * Encode the columns to be used when iterating.
-	 * 
+	 *
 	 * @param cfg
 	 * @param columns
 	 */
-	public static void setColumnFamilies(IteratorSetting cfg, Text[] columns) {
-		if (columns.length < 2)
-			throw new IllegalArgumentException("Must supply at least two terms to intersect");
+	public static void setColumnFamilies(final IteratorSetting cfg, final Text[] columns) {
+		if (columns.length < 2) {
+            throw new IllegalArgumentException("Must supply at least two terms to intersect");
+        }
 		cfg.addOption(AndingIterator.columnFamiliesOptionName, AndingIterator.encodeColumns(columns));
 	}
 
 	/**
 	 * Encode columns and NOT flags indicating which columns should be negated (docIDs will be excluded if matching negated columns, instead
 	 * of included).
-	 * 
+	 *
 	 * @param cfg
 	 * @param columns
 	 * @param notFlags
 	 */
-	public static void setColumnFamilies(IteratorSetting cfg, Text[] columns, boolean[] notFlags) {
-		if (columns.length < 2)
-			throw new IllegalArgumentException("Must supply at least two terms to intersect");
-		if (columns.length != notFlags.length)
-			throw new IllegalArgumentException("columns and notFlags arrays must be the same length");
+	public static void setColumnFamilies(final IteratorSetting cfg, final Text[] columns, final boolean[] notFlags) {
+		if (columns.length < 2) {
+            throw new IllegalArgumentException("Must supply at least two terms to intersect");
+        }
+		if (columns.length != notFlags.length) {
+            throw new IllegalArgumentException("columns and notFlags arrays must be the same length");
+        }
 		setColumnFamilies(cfg, columns);
 		cfg.addOption(AndingIterator.notFlagOptionName, AndingIterator.encodeBooleans(notFlags));
 	}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/SimpleCharStream.java b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
index 5d69967..3bc9164 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/SimpleCharStream.java
@@ -1,887 +1,3 @@
-/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */
-/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
-package org.apache.rya.indexing.accumulo.freetext.query;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -890,9 +6,9 @@ package org.apache.rya.indexing.accumulo.freetext.query;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -900,8 +16,11 @@ package org.apache.rya.indexing.accumulo.freetext.query;
  * specific language governing permissions and limitations
  * under the License.
  */
+/* Generated By:JavaCC: Do not edit this line. SimpleCharStream.java Version 5.0 */
+/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
+package org.apache.rya.indexing.accumulo.freetext.query;
 
-
+import java.nio.charset.StandardCharsets;
 
 /**
  * An implementation of interface CharStream, where the stream is assumed to
@@ -933,15 +52,15 @@ public class SimpleCharStream
   protected int inBuf = 0;
   protected int tabSize = 8;
 
-  protected void setTabSize(int i) { tabSize = i; }
-  protected int getTabSize(int i) { return tabSize; }
+  protected void setTabSize(final int i) { tabSize = i; }
+  protected int getTabSize(final int i) { return tabSize; }
 
 
-  protected void ExpandBuff(boolean wrapAround)
+  protected void ExpandBuff(final boolean wrapAround)
   {
-    char[] newbuffer = new char[bufsize + 2048];
-    int newbufline[] = new int[bufsize + 2048];
-    int newbufcolumn[] = new int[bufsize + 2048];
+    final char[] newbuffer = new char[bufsize + 2048];
+    final int newbufline[] = new int[bufsize + 2048];
+    final int newbufcolumn[] = new int[bufsize + 2048];
 
     try
     {
@@ -975,7 +94,7 @@ public class SimpleCharStream
         maxNextCharInd = (bufpos -= tokenBegin);
       }
     }
-    catch (Throwable t)
+    catch (final Throwable t)
     {
       throw new Error(t.getMessage());
     }
@@ -997,18 +116,20 @@ public class SimpleCharStream
           bufpos = maxNextCharInd = 0;
           available = tokenBegin;
         }
-        else if (tokenBegin < 0)
-          bufpos = maxNextCharInd = 0;
-        else
-          ExpandBuff(false);
+        else if (tokenBegin < 0) {
+            bufpos = maxNextCharInd = 0;
+        } else {
+            ExpandBuff(false);
+        }
       }
-      else if (available > tokenBegin)
+      else if (available > tokenBegin) {
         available = bufsize;
-      else if ((tokenBegin - available) < 2048)
+    } else if ((tokenBegin - available) < 2048) {
         ExpandBuff(true);
-      else
+    } else {
         available = tokenBegin;
     }
+    }
 
     int i;
     try {
@@ -1016,16 +137,17 @@ public class SimpleCharStream
       {
         inputStream.close();
         throw new java.io.IOException();
-      }
-      else
+      } else {
         maxNextCharInd += i;
+    }
       return;
     }
-    catch(java.io.IOException e) {
+    catch(final java.io.IOException e) {
       --bufpos;
       backup(0);
-      if (tokenBegin == -1)
+      if (tokenBegin == -1) {
         tokenBegin = bufpos;
+    }
       throw e;
     }
   }
@@ -1034,13 +156,13 @@ public class SimpleCharStream
   public char BeginToken() throws java.io.IOException
   {
     tokenBegin = -1;
-    char c = readChar();
+    final char c = readChar();
     tokenBegin = bufpos;
 
     return c;
   }
 
-  protected void UpdateLineColumn(char c)
+  protected void UpdateLineColumn(final char c)
   {
     column++;
 
@@ -1055,10 +177,10 @@ public class SimpleCharStream
       if (c == '\n')
       {
         prevCharIsLF = true;
-      }
-      else
+      } else {
         line += (column = 1);
     }
+    }
 
     switch (c)
     {
@@ -1087,16 +209,18 @@ public class SimpleCharStream
     {
       --inBuf;
 
-      if (++bufpos == bufsize)
+      if (++bufpos == bufsize) {
         bufpos = 0;
+    }
 
       return buffer[bufpos];
     }
 
-    if (++bufpos >= maxNextCharInd)
-      FillBuff();
+    if (++bufpos >= maxNextCharInd) {
+        FillBuff();
+    }
 
-    char c = buffer[bufpos];
+    final char c = buffer[bufpos];
 
     UpdateLineColumn(c);
     return c;
@@ -1143,16 +267,17 @@ public class SimpleCharStream
   }
 
 /** Backup a number of characters. */
-  public void backup(int amount) {
+  public void backup(final int amount) {
 
     inBuf += amount;
-    if ((bufpos -= amount) < 0)
-      bufpos += bufsize;
+    if ((bufpos -= amount) < 0) {
+        bufpos += bufsize;
+    }
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.Reader dstream, int startline,
-  int startcolumn, int buffersize)
+  public SimpleCharStream(final java.io.Reader dstream, final int startline,
+  final int startcolumn, final int buffersize)
   {
     inputStream = dstream;
     line = startline;
@@ -1165,21 +290,21 @@ public class SimpleCharStream
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.Reader dstream, int startline,
-                          int startcolumn)
+  public SimpleCharStream(final java.io.Reader dstream, final int startline,
+                          final int startcolumn)
   {
     this(dstream, startline, startcolumn, 4096);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.Reader dstream)
+  public SimpleCharStream(final java.io.Reader dstream)
   {
     this(dstream, 1, 1, 4096);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.Reader dstream, int startline,
-  int startcolumn, int buffersize)
+  public void ReInit(final java.io.Reader dstream, final int startline,
+  final int startcolumn, final int buffersize)
   {
     inputStream = dstream;
     line = startline;
@@ -1198,112 +323,113 @@ public class SimpleCharStream
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.Reader dstream, int startline,
-                     int startcolumn)
+  public void ReInit(final java.io.Reader dstream, final int startline,
+                     final int startcolumn)
   {
     ReInit(dstream, startline, startcolumn, 4096);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.Reader dstream)
+  public void ReInit(final java.io.Reader dstream)
   {
     ReInit(dstream, 1, 1, 4096);
   }
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
-  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  public SimpleCharStream(final java.io.InputStream dstream, final String encoding, final int startline,
+  final int startcolumn, final int buffersize) throws java.io.UnsupportedEncodingException
   {
-    this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+    this(encoding == null ? new java.io.InputStreamReader(dstream, StandardCharsets.UTF_8) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream, int startline,
-  int startcolumn, int buffersize)
+  public SimpleCharStream(final java.io.InputStream dstream, final int startline,
+  final int startcolumn, final int buffersize)
   {
-    this(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+    this(new java.io.InputStreamReader(dstream, StandardCharsets.UTF_8), startline, startcolumn, buffersize);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream, String encoding, int startline,
-                          int startcolumn) throws java.io.UnsupportedEncodingException
+  public SimpleCharStream(final java.io.InputStream dstream, final String encoding, final int startline,
+                          final int startcolumn) throws java.io.UnsupportedEncodingException
   {
     this(dstream, encoding, startline, startcolumn, 4096);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream, int startline,
-                          int startcolumn)
+  public SimpleCharStream(final java.io.InputStream dstream, final int startline,
+                          final int startcolumn)
   {
     this(dstream, startline, startcolumn, 4096);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  public SimpleCharStream(final java.io.InputStream dstream, final String encoding) throws java.io.UnsupportedEncodingException
   {
     this(dstream, encoding, 1, 1, 4096);
   }
 
   /** Constructor. */
-  public SimpleCharStream(java.io.InputStream dstream)
+  public SimpleCharStream(final java.io.InputStream dstream)
   {
     this(dstream, 1, 1, 4096);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
-                          int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  public void ReInit(final java.io.InputStream dstream, final String encoding, final int startline,
+                          final int startcolumn, final int buffersize) throws java.io.UnsupportedEncodingException
   {
-    ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+    ReInit(encoding == null ? new java.io.InputStreamReader(dstream, StandardCharsets.UTF_8) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, int startline,
-                          int startcolumn, int buffersize)
+  public void ReInit(final java.io.InputStream dstream, final int startline,
+                          final int startcolumn, final int buffersize)
   {
-    ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+    ReInit(new java.io.InputStreamReader(dstream, StandardCharsets.UTF_8), startline, startcolumn, buffersize);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  public void ReInit(final java.io.InputStream dstream, final String encoding) throws java.io.UnsupportedEncodingException
   {
     ReInit(dstream, encoding, 1, 1, 4096);
   }
 
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream)
+  public void ReInit(final java.io.InputStream dstream)
   {
     ReInit(dstream, 1, 1, 4096);
   }
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
-                     int startcolumn) throws java.io.UnsupportedEncodingException
+  public void ReInit(final java.io.InputStream dstream, final String encoding, final int startline,
+                     final int startcolumn) throws java.io.UnsupportedEncodingException
   {
     ReInit(dstream, encoding, startline, startcolumn, 4096);
   }
   /** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, int startline,
-                     int startcolumn)
+  public void ReInit(final java.io.InputStream dstream, final int startline,
+                     final int startcolumn)
   {
     ReInit(dstream, startline, startcolumn, 4096);
   }
   /** Get token literal value. */
   public String GetImage()
   {
-    if (bufpos >= tokenBegin)
-      return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
-    else
-      return new String(buffer, tokenBegin, bufsize - tokenBegin) +
-                            new String(buffer, 0, bufpos + 1);
+    if (bufpos >= tokenBegin) {
+        return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
+    } else {
+        return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+                                new String(buffer, 0, bufpos + 1);
+    }
   }
 
   /** Get the suffix. */
-  public char[] GetSuffix(int len)
+  public char[] GetSuffix(final int len)
   {
-    char[] ret = new char[len];
+    final char[] ret = new char[len];
 
-    if ((bufpos + 1) >= len)
-      System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
-    else
+    if ((bufpos + 1) >= len) {
+        System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
+    } else
     {
       System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
                                                         len - bufpos - 1);
@@ -1324,7 +450,7 @@ public class SimpleCharStream
   /**
    * Method to adjust line and column numbers for the start of a token.
    */
-  public void adjustBeginLineColumn(int newLine, int newCol)
+  public void adjustBeginLineColumn(int newLine, final int newCol)
   {
     int start = tokenBegin;
     int len;
@@ -1357,10 +483,11 @@ public class SimpleCharStream
 
       while (i++ < len)
       {
-        if (bufline[j = start % bufsize] != bufline[++start % bufsize])
-          bufline[j] = newLine++;
-        else
-          bufline[j] = newLine;
+        if (bufline[j = start % bufsize] != bufline[++start % bufsize]) {
+            bufline[j] = newLine++;
+        } else {
+            bufline[j] = newLine;
+        }
       }
     }
 

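The recurring fix in the hunk above replaces InputStreamReader's platform-default constructor with one that pins the charset. A minimal standalone sketch of the bug pattern and the fix (the class and method names here are illustrative, not from the Rya codebase):

    import java.io.BufferedReader;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class CharsetReaderSketch {

        // Bug pattern: the reader decodes with the JVM's file.encoding, so
        // multi-byte characters are silently mangled on non-UTF-8 platforms.
        static String readFirstLineDefault(InputStream in) throws IOException {
            try (BufferedReader r = new BufferedReader(new InputStreamReader(in))) {
                return r.readLine();
            }
        }

        // Fixed pattern: the charset is pinned, so decoding is identical on
        // every JVM regardless of platform defaults.
        static String readFirstLineUtf8(InputStream in) throws IOException {
            try (BufferedReader r = new BufferedReader(
                    new InputStreamReader(in, StandardCharsets.UTF_8))) {
                return r.readLine();
            }
        }

        public static void main(String[] args) throws IOException {
            byte[] utf8 = "héllo".getBytes(StandardCharsets.UTF_8);
            System.out.println(readFirstLineUtf8(new ByteArrayInputStream(utf8)));
        }
    }

Note that the null-encoding constructors in the diff keep a caller-supplied encoding when one is given; UTF-8 only substitutes for the old platform default.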
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexingExample/src/main/java/RyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/RyaDirectExample.java b/extras/indexingExample/src/main/java/RyaDirectExample.java
index d2aa1f6..eed1a22 100644
--- a/extras/indexingExample/src/main/java/RyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/RyaDirectExample.java
@@ -18,20 +18,16 @@
  */
 
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.TableExistsException;
 import org.apache.accumulo.core.client.TableNotFoundException;
 import org.apache.accumulo.core.client.mock.MockInstance;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.commons.lang.Validate;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
@@ -153,7 +149,7 @@ public class RyaDirectExample {
 
 	private static Configuration getConf() {
 
-		
+
 		return AccumuloIndexingConfiguration.builder()
 			.setUseMockAccumulo(USE_MOCK_INSTANCE)
 			.setAuths(AUTHS)
@@ -165,7 +161,7 @@ public class RyaDirectExample {
 			.setUseAccumuloFreetextIndex(true)
 			.setUseAccumuloTemporalIndex(true)
 			.build();
-		
+
 	}
 
 	public static void testAddAndDelete(final SailRepositoryConnection conn)
@@ -775,7 +771,7 @@ public class RyaDirectExample {
 			final String tablename2 = RYA_TABLE_PREFIX + "INDEX_2";
 
 			final Connector accCon = new MockInstance(INSTANCE).getConnector(
-					"root", new PasswordToken("".getBytes()));
+					"root", new PasswordToken("".getBytes(StandardCharsets.UTF_8)));
 
 			new PcjTables().createAndPopulatePcj(conn, accCon, tablename1,
 					queryString1, new String[] { "e", "c", "l", "o" },
@@ -784,7 +780,7 @@ public class RyaDirectExample {
 			new PcjTables().createAndPopulatePcj(conn, accCon, tablename2,
 					queryString2, new String[] { "e", "c", "l", "o" },
 					Optional.<PcjVarOrderFactory> absent());
-			
+
 		} catch (final RyaDAOException e) {
 			throw new Error("While creating PCJ tables.",e);
 		} finally {

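The PasswordToken change above is the same class of fix: String.getBytes() with no argument encodes with the platform default, so two clients with different locales could derive different token bytes from the same password string. A small sketch with an illustrative class name:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class PasswordBytesSketch {
        public static void main(String[] args) {
            String password = "pässword";
            // Bug pattern: the byte encoding depends on the JVM's default charset.
            byte[] platformBytes = password.getBytes();
            // Fixed pattern: deterministic bytes on every client.
            byte[] utf8Bytes = password.getBytes(StandardCharsets.UTF_8);
            // Prints true only when the platform default happens to be UTF-8.
            System.out.println(Arrays.equals(platformBytes, utf8Bytes));
        }
    }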

[03/11] incubator-rya git commit: RYA-401 Fixed all default charset bugs. Closes #243.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/mapreduce/src/main/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountTool.java
----------------------------------------------------------------------
diff --git a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountTool.java b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountTool.java
index a1c84aa..5adb893 100644
--- a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountTool.java
+++ b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/tools/AccumuloRdfCountTool.java
@@ -22,19 +22,9 @@ package org.apache.rya.accumulo.mr.tools;
 
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Date;
 
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.accumulo.AccumuloRdfConstants;
-import org.apache.rya.accumulo.mr.AbstractAccumuloMRTool;
-import org.apache.rya.accumulo.mr.MRUtils;
-import org.apache.rya.api.RdfCloudTripleStoreConstants;
-import org.apache.rya.api.domain.RyaStatement;
-import org.apache.rya.api.domain.RyaURI;
-import org.apache.rya.api.resolver.RyaTripleContext;
-import org.apache.rya.api.resolver.triple.TripleRow;
-import org.apache.rya.api.resolver.triple.TripleRowResolverException;
-
 import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Mutation;
@@ -49,6 +39,16 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.Reducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.accumulo.AccumuloRdfConstants;
+import org.apache.rya.accumulo.mr.AbstractAccumuloMRTool;
+import org.apache.rya.accumulo.mr.MRUtils;
+import org.apache.rya.api.RdfCloudTripleStoreConstants;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.RyaURI;
+import org.apache.rya.api.resolver.RyaTripleContext;
+import org.apache.rya.api.resolver.triple.TripleRow;
+import org.apache.rya.api.resolver.triple.TripleRowResolverException;
 import org.openrdf.model.ValueFactory;
 import org.openrdf.model.impl.ValueFactoryImpl;
 
@@ -64,13 +64,14 @@ import com.google.common.io.ByteStreams;
  * Time: 10:39:40 AM
  * @deprecated
  */
+@Deprecated
 public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool {
 
-    public static void main(String[] args) {
+    public static void main(final String[] args) {
         try {
 
             ToolRunner.run(new Configuration(), new AccumuloRdfCountTool(), args);
-        } catch (Exception e) {
+        } catch (final Exception e) {
             e.printStackTrace();
         }
     }
@@ -80,13 +81,13 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
      */
 
     @Override
-    public int run(String[] strings) throws Exception {
+    public int run(final String[] strings) throws Exception {
         conf.set(MRUtils.JOB_NAME_PROP, "Gather Evaluation Statistics");
 
         //initialize
         init();
 
-        Job job = new Job(conf);
+        final Job job = new Job(conf);
         job.setJarByClass(AccumuloRdfCountTool.class);
         setupAccumuloInput(job);
 
@@ -102,16 +103,16 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
         job.setCombinerClass(CountPiecesCombiner.class);
         job.setReducerClass(CountPiecesReducer.class);
 
-        String outputTable = MRUtils.getTablePrefix(conf) + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX;
+        final String outputTable = MRUtils.getTablePrefix(conf) + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX;
         setupAccumuloOutput(job, outputTable);
 
         // Submit the job
-        Date startTime = new Date();
+        final Date startTime = new Date();
         System.out.println("Job started: " + startTime);
-        int exitCode = job.waitForCompletion(true) ? 0 : 1;
+        final int exitCode = job.waitForCompletion(true) ? 0 : 1;
 
         if (exitCode == 0) {
-            Date end_time = new Date();
+            final Date end_time = new Date();
             System.out.println("Job ended: " + end_time);
             System.out.println("The job took "
                     + (end_time.getTime() - startTime.getTime()) / 1000
@@ -131,38 +132,39 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
 
         ValueFactoryImpl vf = new ValueFactoryImpl();
 
-        private Text keyOut = new Text();
-        private LongWritable valOut = new LongWritable(1);
+        private final Text keyOut = new Text();
+        private final LongWritable valOut = new LongWritable(1);
         private RyaTripleContext ryaContext;
 
         @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
+        protected void setup(final Context context) throws IOException, InterruptedException {
             super.setup(context);
-            Configuration conf = context.getConfiguration();
+            final Configuration conf = context.getConfiguration();
             tableLayout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.valueOf(
                     conf.get(MRUtils.TABLE_LAYOUT_PROP, RdfCloudTripleStoreConstants.TABLE_LAYOUT.OSP.toString()));
             ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration(conf));
         }
 
         @Override
-        protected void map(Key key, Value value, Context context) throws IOException, InterruptedException {
+        protected void map(final Key key, final Value value, final Context context) throws IOException, InterruptedException {
             try {
-                RyaStatement statement = ryaContext.deserializeTriple(tableLayout, new TripleRow(key.getRow().getBytes(), key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes()));
+                final RyaStatement statement = ryaContext.deserializeTriple(tableLayout, new TripleRow(key.getRow().getBytes(), key.getColumnFamily().getBytes(), key.getColumnQualifier().getBytes()));
                 //count each piece subject, pred, object
 
-                String subj = statement.getSubject().getData();
-                String pred = statement.getPredicate().getData();
+                final String subj = statement.getSubject().getData();
+                final String pred = statement.getPredicate().getData();
 //                byte[] objBytes = tripleFormat.getValueFormat().serialize(statement.getObject());
-                RyaURI scontext = statement.getContext();
-                boolean includesContext = scontext != null;
-                String scontext_str = (includesContext) ? scontext.getData() : null;
+                final RyaURI scontext = statement.getContext();
+                final boolean includesContext = scontext != null;
+                final String scontext_str = (includesContext) ? scontext.getData() : null;
 
                 ByteArrayDataOutput output = ByteStreams.newDataOutput();
                 output.writeUTF(subj);
                 output.writeUTF(RdfCloudTripleStoreConstants.SUBJECT_CF);
                 output.writeBoolean(includesContext);
-                if (includesContext)
+                if (includesContext) {
                     output.writeUTF(scontext_str);
+                }
                 keyOut.set(output.toByteArray());
                 context.write(keyOut, valOut);
 
@@ -170,11 +172,12 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
                 output.writeUTF(pred);
                 output.writeUTF(RdfCloudTripleStoreConstants.PRED_CF);
                 output.writeBoolean(includesContext);
-                if (includesContext)
+                if (includesContext) {
                     output.writeUTF(scontext_str);
+                }
                 keyOut.set(output.toByteArray());
                 context.write(keyOut, valOut);
-            } catch (TripleRowResolverException e) {
+            } catch (final TripleRowResolverException e) {
                 throw new IOException(e);
             }
         }
@@ -182,21 +185,22 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
 
     public static class CountPiecesCombiner extends Reducer<Text, LongWritable, Text, LongWritable> {
 
-        private LongWritable valOut = new LongWritable();
+        private final LongWritable valOut = new LongWritable();
 
         // TODO: can still add up to be large I guess
         // any count lower than this does not need to be saved
         public static final int TOO_LOW = 2;
 
         @Override
-        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+        protected void reduce(final Text key, final Iterable<LongWritable> values, final Context context) throws IOException, InterruptedException {
             long count = 0;
-            for (LongWritable lw : values) {
+            for (final LongWritable lw : values) {
                 count += lw.get();
             }
 
-            if (count <= TOO_LOW)
+            if (count <= TOO_LOW) {
                 return;
+            }
 
             valOut.set(count);
             context.write(key, valOut);
@@ -218,38 +222,40 @@ public class AccumuloRdfCountTool extends AbstractAccumuloMRTool implements Tool
         private ColumnVisibility cv = AccumuloRdfConstants.EMPTY_CV;
 
         @Override
-        protected void setup(Context context) throws IOException, InterruptedException {
+        protected void setup(final Context context) throws IOException, InterruptedException {
             super.setup(context);
             tablePrefix = context.getConfiguration().get(MRUtils.TABLE_PREFIX_PROPERTY, RdfCloudTripleStoreConstants.TBL_PRFX_DEF);
             table = new Text(tablePrefix + RdfCloudTripleStoreConstants.TBL_EVAL_SUFFIX);
             final String cv_s = context.getConfiguration().get(MRUtils.AC_CV_PROP);
-            if (cv_s != null)
+            if (cv_s != null) {
                 cv = new ColumnVisibility(cv_s);
+            }
         }
 
         @Override
-        protected void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
+        protected void reduce(final Text key, final Iterable<LongWritable> values, final Context context) throws IOException, InterruptedException {
             long count = 0;
-            for (LongWritable lw : values) {
+            for (final LongWritable lw : values) {
                 count += lw.get();
             }
 
-            if (count <= TOO_LOW)
+            if (count <= TOO_LOW) {
                 return;
+            }
 
-            ByteArrayDataInput badi = ByteStreams.newDataInput(key.getBytes());
-            String v = badi.readUTF();
+            final ByteArrayDataInput badi = ByteStreams.newDataInput(key.getBytes());
+            final String v = badi.readUTF();
             cat_txt.set(badi.readUTF());
 
             Text columnQualifier = RdfCloudTripleStoreConstants.EMPTY_TEXT;
-            boolean includesContext = badi.readBoolean();
+            final boolean includesContext = badi.readBoolean();
             if (includesContext) {
                 columnQualifier = new Text(badi.readUTF());
             }
 
             row.set(v);
-            Mutation m = new Mutation(row);
-            v_out.set((count + "").getBytes());
+            final Mutation m = new Mutation(row);
+            v_out.set((count + "").getBytes(StandardCharsets.UTF_8));
             m.put(cat_txt, columnQualifier, cv, v_out);
             context.write(table, m);
         }

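The reducer change above pins the charset used to serialize the count into the evaluation table. ASCII digits encode identically in the common charsets, but making UTF-8 explicit keeps the stored bytes deterministic and documents the intent. A minimal round-trip sketch (names are illustrative, not Rya API):

    import java.nio.charset.StandardCharsets;

    public class CountSerializationSketch {

        // Mirrors the reducer: decimal text, explicitly UTF-8 encoded.
        static byte[] serializeCount(long count) {
            return Long.toString(count).getBytes(StandardCharsets.UTF_8);
        }

        static long deserializeCount(byte[] bytes) {
            return Long.parseLong(new String(bytes, StandardCharsets.UTF_8));
        }

        public static void main(String[] args) {
            System.out.println(deserializeCount(serializeCount(42L))); // 42
        }
    }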
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/osgi/camel.rya/src/main/java/org/apache/rya/camel/cbsail/CbSailProducer.java
----------------------------------------------------------------------
diff --git a/osgi/camel.rya/src/main/java/org/apache/rya/camel/cbsail/CbSailProducer.java b/osgi/camel.rya/src/main/java/org/apache/rya/camel/cbsail/CbSailProducer.java
index e570ce5..eba4b3d 100644
--- a/osgi/camel.rya/src/main/java/org/apache/rya/camel/cbsail/CbSailProducer.java
+++ b/osgi/camel.rya/src/main/java/org/apache/rya/camel/cbsail/CbSailProducer.java
@@ -1,5 +1,18 @@
 package org.apache.rya.camel.cbsail;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConfiguration.CONF_INFER;
+import static org.apache.rya.api.RdfCloudTripleStoreConfiguration.CONF_QUERY_AUTH;
+import static org.apache.rya.camel.cbsail.CbSailComponent.SPARQL_QUERY_PROP;
+import static org.apache.rya.camel.cbsail.CbSailComponent.valueFactory;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +21,9 @@ package org.apache.rya.camel.cbsail;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -24,28 +37,27 @@ package org.apache.rya.camel.cbsail;
 import org.apache.camel.Exchange;
 import org.apache.camel.impl.DefaultProducer;
 import org.openrdf.model.Statement;
-import org.openrdf.query.*;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandlerBase;
+import org.openrdf.query.TupleQueryResultHandlerException;
 import org.openrdf.query.resultio.sparqlxml.SPARQLResultsXMLWriter;
 import org.openrdf.repository.RepositoryConnection;
 import org.openrdf.repository.RepositoryException;
 import org.openrdf.rio.RDFHandlerException;
 
-import java.io.ByteArrayOutputStream;
-import java.util.*;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConfiguration.*;
-import static org.apache.rya.camel.cbsail.CbSailComponent.SPARQL_QUERY_PROP;
-import static org.apache.rya.camel.cbsail.CbSailComponent.valueFactory;
-
 /**
  */
 public class CbSailProducer extends DefaultProducer {
 
     private RepositoryConnection connection;
 
-    private CbSailEndpoint.CbSailOutput queryOutput = CbSailEndpoint.CbSailOutput.BINARY;
+    private final CbSailEndpoint.CbSailOutput queryOutput = CbSailEndpoint.CbSailOutput.BINARY;
 
-    public CbSailProducer(CbSailEndpoint endpoint) {
+    public CbSailProducer(final CbSailEndpoint endpoint) {
         super(endpoint);
     }
 
@@ -53,78 +65,83 @@ public class CbSailProducer extends DefaultProducer {
     public void process(final Exchange exchange) throws Exception {
         //If a query is set in the header or uri, use it
         Collection<String> queries = new ArrayList<String>();
-        Collection tmp = exchange.getIn().getHeader(SPARQL_QUERY_PROP, Collection.class);
+        final Collection tmp = exchange.getIn().getHeader(SPARQL_QUERY_PROP, Collection.class);
         if (tmp != null) {
             queries = tmp;
         } else {
-            String query = exchange.getIn().getHeader(SPARQL_QUERY_PROP, String.class);
+            final String query = exchange.getIn().getHeader(SPARQL_QUERY_PROP, String.class);
             if (query != null) {
                 queries.add(query);
             }
         }
 
-        if (queries.size() > 0)
+        if (queries.size() > 0) {
             sparqlQuery(exchange, queries);
-        else
+        } else {
             inputTriples(exchange);
+        }
     }
 
-    protected void inputTriples(Exchange exchange) throws RepositoryException {
-        Object body = exchange.getIn().getBody();
+    protected void inputTriples(final Exchange exchange) throws RepositoryException {
+        final Object body = exchange.getIn().getBody();
         if (body instanceof Statement) {
             //save statement
             inputStatement((Statement) body);
         } else if (body instanceof List) {
             //save list of statements
-            List lst = (List) body;
-            for (Object obj : lst) {
-                if (obj instanceof Statement)
+            final List lst = (List) body;
+            for (final Object obj : lst) {
+                if (obj instanceof Statement) {
                     inputStatement((Statement) obj);
+                }
             }
         }
         connection.commit();
         exchange.getOut().setBody(Boolean.TRUE);
     }
 
-    protected void inputStatement(Statement stmt) throws RepositoryException {
+    protected void inputStatement(final Statement stmt) throws RepositoryException {
         connection.add(stmt.getSubject(), stmt.getPredicate(), stmt.getObject());
     }
 
-    protected void sparqlQuery(Exchange exchange, Collection<String> queries) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException, RDFHandlerException {
+    protected void sparqlQuery(final Exchange exchange, final Collection<String> queries) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException, RDFHandlerException {
 
-        List list = new ArrayList();
-        for (String query : queries) {
+        final List list = new ArrayList();
+        for (final String query : queries) {
 
 //            Long startTime = exchange.getIn().getHeader(START_TIME_QUERY_PROP, Long.class);
 //            Long ttl = exchange.getIn().getHeader(TTL_QUERY_PROP, Long.class);
-            String auth = exchange.getIn().getHeader(CONF_QUERY_AUTH, String.class);
-            Boolean infer = exchange.getIn().getHeader(CONF_INFER, Boolean.class);
+            final String auth = exchange.getIn().getHeader(CONF_QUERY_AUTH, String.class);
+            final Boolean infer = exchange.getIn().getHeader(CONF_INFER, Boolean.class);
 
-            Object output = performSelect(query, auth, infer);
+            final Object output = performSelect(query, auth, infer);
             if (queries.size() == 1) {
                 exchange.getOut().setBody(output);
                 return;
-            } else
+            } else {
                 list.add(output);
+            }
 
         }
         exchange.getOut().setBody(list);
     }
 
-    protected Object performSelect(String query, String auth, Boolean infer) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException {
-        TupleQuery tupleQuery = connection.prepareTupleQuery(
+    protected Object performSelect(final String query, final String auth, final Boolean infer) throws RepositoryException, MalformedQueryException, QueryEvaluationException, TupleQueryResultHandlerException {
+        final TupleQuery tupleQuery = connection.prepareTupleQuery(
                 QueryLanguage.SPARQL, query);
-        if (auth != null && auth.length() > 0)
+        if (auth != null && auth.length() > 0) {
             tupleQuery.setBinding(CONF_QUERY_AUTH, valueFactory.createLiteral(auth));
-        if (infer != null)
+        }
+        if (infer != null) {
             tupleQuery.setBinding(CONF_INFER, valueFactory.createLiteral(infer));
+        }
         if (CbSailEndpoint.CbSailOutput.BINARY.equals(queryOutput)) {
             final List listOutput = new ArrayList();
-            TupleQueryResultHandlerBase handler = new TupleQueryResultHandlerBase() {
+            final TupleQueryResultHandlerBase handler = new TupleQueryResultHandlerBase() {
                 @Override
-                public void handleSolution(BindingSet bindingSet) throws TupleQueryResultHandlerException {
-                    Map<String, String> map = new HashMap<String, String>();
-                    for (String s : bindingSet.getBindingNames()) {
+                public void handleSolution(final BindingSet bindingSet) throws TupleQueryResultHandlerException {
+                    final Map<String, String> map = new HashMap<String, String>();
+                    for (final String s : bindingSet.getBindingNames()) {
                         map.put(s, bindingSet.getBinding(s).getValue().stringValue());
                     }
                     listOutput.add(map);
@@ -133,10 +150,10 @@ public class CbSailProducer extends DefaultProducer {
             tupleQuery.evaluate(handler);
             return listOutput;
         } else if (CbSailEndpoint.CbSailOutput.XML.equals(queryOutput)) {
-            ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(baos);
+            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+            final SPARQLResultsXMLWriter sparqlWriter = new SPARQLResultsXMLWriter(baos);
             tupleQuery.evaluate(sparqlWriter);
-            return new String(baos.toByteArray());
+            return new String(baos.toByteArray(), StandardCharsets.UTF_8);
         } else {
             throw new IllegalArgumentException("Query Output[" + queryOutput + "] is not recognized");
         }
@@ -164,7 +181,7 @@ public class CbSailProducer extends DefaultProducer {
 
     @Override
     protected void doStart() throws Exception {
-        CbSailEndpoint cbSailEndpoint = (CbSailEndpoint) getEndpoint();
+        final CbSailEndpoint cbSailEndpoint = (CbSailEndpoint) getEndpoint();
         connection = cbSailEndpoint.getSailRepository().getConnection();
     }
 

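The XML branch above decodes the SPARQL result buffer with an explicit charset rather than the platform default. A standalone sketch of the encode/decode round trip, assuming the writer emits UTF-8 bytes (illustrative names, not the Sesame writer itself):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;

    public class XmlBufferSketch {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            // Stand-in for SPARQLResultsXMLWriter: write UTF-8 XML bytes.
            try (Writer w = new OutputStreamWriter(baos, StandardCharsets.UTF_8)) {
                w.write("<binding name=\"o\">héllo</binding>");
            }
            // Bug pattern: new String(baos.toByteArray()) decodes with the
            // platform default and can corrupt non-ASCII XML content.
            // Fixed pattern: decode with the charset the bytes were written in.
            String xml = new String(baos.toByteArray(), StandardCharsets.UTF_8);
            System.out.println(xml);
        }
    }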
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/AccumuloStorage.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/AccumuloStorage.java b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/AccumuloStorage.java
index ac151d9..c97c717 100644
--- a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/AccumuloStorage.java
+++ b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/AccumuloStorage.java
@@ -8,9 +8,9 @@ package org.apache.rya.accumulo.pig;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.LinkedList;
@@ -34,8 +35,8 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.Constants;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
-import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
 import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
 import org.apache.accumulo.core.client.mapreduce.lib.util.ConfiguratorBase;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
@@ -110,8 +111,8 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
                 return null;
             }
 
-            Key key = (Key) reader.getCurrentKey();
-            Value value = (Value) reader.getCurrentValue();
+            final Key key = reader.getCurrentKey();
+            final Value value = reader.getCurrentValue();
             assert key != null && value != null;
 
             if (logger.isTraceEnabled()) {
@@ -119,7 +120,7 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
             }
 
             // and wrap it in a tuple
-            Tuple tuple = TupleFactory.getInstance().newTuple(6);
+            final Tuple tuple = TupleFactory.getInstance().newTuple(6);
             tuple.set(0, new DataByteArray(key.getRow().getBytes()));
             tuple.set(1, new DataByteArray(key.getColumnFamily().getBytes()));
             tuple.set(2, new DataByteArray(key.getColumnQualifier().getBytes()));
@@ -130,7 +131,7 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
                 logger.trace("Output tuple[" + tuple + "]");
             }
             return tuple;
-        } catch (InterruptedException e) {
+        } catch (final InterruptedException e) {
             throw new IOException(e.getMessage());
         }
     }
@@ -141,12 +142,12 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
     }
 
     @Override
-    public void prepareToRead(RecordReader reader, PigSplit split) {
+    public void prepareToRead(final RecordReader reader, final PigSplit split) {
         this.reader = reader;
     }
 
     @Override
-    public void setLocation(String location, Job job) throws IOException {
+    public void setLocation(final String location, final Job job) throws IOException {
         if (logger.isDebugEnabled()) {
             logger.debug("Set Location[" + location + "] for job[" + job.getJobName() + "]");
         }
@@ -155,8 +156,8 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
 
         if (!ConfiguratorBase.isConnectorInfoSet(AccumuloInputFormat.class, conf)) {
             try {
-				AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes()));
-			} catch (AccumuloSecurityException e) {
+				AccumuloInputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes(StandardCharsets.UTF_8)));
+			} catch (final AccumuloSecurityException e) {
 				throw new RuntimeException(e);
 			}
             AccumuloInputFormat.setInputTableName(job, table);
@@ -167,8 +168,9 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
                 AccumuloInputFormat.setMockInstance(job, inst);
             }
         }
-        if (columnFamilyColumnQualifierPairs.size() > 0)
+        if (columnFamilyColumnQualifierPairs.size() > 0) {
             AccumuloInputFormat.fetchColumns(job, columnFamilyColumnQualifierPairs);
+        }
         logger.info("Set ranges[" + ranges + "] for job[" + job.getJobName() + "] on table[" + table + "] " +
                 "for columns[" + columnFamilyColumnQualifierPairs + "] with authorizations[" + authorizations + "]");
 
@@ -178,24 +180,25 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         AccumuloInputFormat.setRanges(job, ranges);
     }
 
-    protected void setLocationFromUri(String uri, Job job) throws IOException {
+    protected void setLocationFromUri(final String uri, final Job job) throws IOException {
         // ex: accumulo://table1?instance=myinstance&user=root&password=secret&zookeepers=127.0.0.1:2181&auths=PRIVATE,PUBLIC&columns=col1|cq1,col2|cq2&range=a|z&range=1|9&mock=true
         try {
-            if (!uri.startsWith("accumulo://"))
+            if (!uri.startsWith("accumulo://")) {
                 throw new Exception("Bad scheme.");
-            String[] urlParts = uri.split("\\?");
+            }
+            final String[] urlParts = uri.split("\\?");
             setLocationFromUriParts(urlParts);
 
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new IOException("Expected 'accumulo://<table>[?instance=<instanceName>&user=<user>&password=<password>&zookeepers=<zookeepers>&auths=<authorizations>&[range=startRow|endRow[...],columns=[cf1|cq1,cf2|cq2,...]],mock=true(false)]': " + e.getMessage(), e);
         }
     }
 
-    protected void setLocationFromUriParts(String[] urlParts) {
+    protected void setLocationFromUriParts(final String[] urlParts) {
         String columns = "";
         if (urlParts.length > 1) {
-            for (String param : urlParts[1].split("&")) {
-                String[] pair = param.split("=");
+            for (final String param : urlParts[1].split("&")) {
+                final String[] pair = param.split("=");
                 if (pair[0].equals("instance")) {
                     inst = pair[1];
                 } else if (pair[0].equals("user")) {
@@ -209,7 +212,7 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
                 } else if (pair[0].equals("columns")) {
                     columns = pair[1];
                 } else if (pair[0].equals("range")) {
-                    String[] r = pair[1].split("\\|");
+                    final String[] r = pair[1].split("\\|");
                     if (r.length == 2) {
                         addRange(new Range(r[0], r[1]));
                     } else {
@@ -221,7 +224,7 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
                 addLocationFromUriPart(pair);
             }
         }
-        String[] parts = urlParts[0].split("/+");
+        final String[] parts = urlParts[0].split("/+");
         table = parts[1];
         tableName = new Text(table);
 
@@ -232,11 +235,11 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         }
 
         if (!columns.equals("")) {
-            for (String cfCq : columns.split(",")) {
+            for (final String cfCq : columns.split(",")) {
                 if (cfCq.contains("|")) {
-                    String[] c = cfCq.split("\\|");
-                    String cf = c[0];
-                    String cq = c[1];
+                    final String[] c = cfCq.split("\\|");
+                    final String cf = c[0];
+                    final String cq = c[1];
                     addColumnPair(cf, cq);
                 } else {
                     addColumnPair(cfCq, null);
@@ -245,50 +248,53 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         }
     }
 
-    protected void addColumnPair(String cf, String cq) {
+    protected void addColumnPair(final String cf, final String cq) {
         columnFamilyColumnQualifierPairs.add(new Pair<Text, Text>((cf != null) ? new Text(cf) : null, (cq != null) ? new Text(cq) : null));
     }
 
-    protected void addLocationFromUriPart(String[] pair) {
+    protected void addLocationFromUriPart(final String[] pair) {
 
     }
 
-    protected void addRange(Range range) {
+    protected void addRange(final Range range) {
         ranges.add(range);
     }
 
     @Override
-    public String relativeToAbsolutePath(String location, Path curDir) throws IOException {
+    public String relativeToAbsolutePath(final String location, final Path curDir) throws IOException {
         return location;
     }
 
     @Override
-    public void setUDFContextSignature(String signature) {
+    public void setUDFContextSignature(final String signature) {
 
     }
 
     /* StoreFunc methods */
-    public void setStoreFuncUDFContextSignature(String signature) {
+    @Override
+    public void setStoreFuncUDFContextSignature(final String signature) {
 
     }
 
-    public String relToAbsPathForStoreLocation(String location, Path curDir) throws IOException {
+    @Override
+    public String relToAbsPathForStoreLocation(final String location, final Path curDir) throws IOException {
         return relativeToAbsolutePath(location, curDir);
     }
 
-    public void setStoreLocation(String location, Job job) throws IOException {
+    @Override
+    public void setStoreLocation(final String location, final Job job) throws IOException {
         conf = job.getConfiguration();
         setLocationFromUri(location, job);
 
         if (!conf.getBoolean(AccumuloOutputFormat.class.getSimpleName() + ".configured", false)) {
             try {
-				AccumuloOutputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes()));
-			} catch (AccumuloSecurityException e) {
+				AccumuloOutputFormat.setConnectorInfo(job, user, new PasswordToken(password.getBytes(StandardCharsets.UTF_8)));
+			} catch (final AccumuloSecurityException e) {
 				throw new RuntimeException(e);
 			}
             AccumuloOutputFormat.setDefaultTableName(job, table);
             AccumuloOutputFormat.setZooKeeperInstance(job, inst, zookeepers);
-            BatchWriterConfig config = new BatchWriterConfig();
+            final BatchWriterConfig config = new BatchWriterConfig();
             config.setMaxLatency(10, TimeUnit.SECONDS);
             config.setMaxMemory(10 * 1000 * 1000);
             config.setMaxWriteThreads(10);
@@ -296,66 +302,70 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         }
     }
 
+    @Override
     public OutputFormat getOutputFormat() {
         return new AccumuloOutputFormat();
     }
 
-    public void checkSchema(ResourceSchema schema) throws IOException {
+    @Override
+    public void checkSchema(final ResourceSchema schema) throws IOException {
         // we don't care about types, they all get casted to ByteBuffers
     }
 
-    public void prepareToWrite(RecordWriter writer) {
+    @Override
+    public void prepareToWrite(final RecordWriter writer) {
         this.writer = writer;
     }
 
-    public void putNext(Tuple t) throws ExecException, IOException {
-        Mutation mut = new Mutation(objToText(t.get(0)));
-        Text cf = objToText(t.get(1));
-        Text cq = objToText(t.get(2));
+    @Override
+    public void putNext(final Tuple t) throws ExecException, IOException {
+        final Mutation mut = new Mutation(objToText(t.get(0)));
+        final Text cf = objToText(t.get(1));
+        final Text cq = objToText(t.get(2));
 
         if (t.size() > 4) {
-            Text cv = objToText(t.get(3));
-            Value val = new Value(objToBytes(t.get(4)));
+            final Text cv = objToText(t.get(3));
+            final Value val = new Value(objToBytes(t.get(4)));
             if (cv.getLength() == 0) {
                 mut.put(cf, cq, val);
             } else {
                 mut.put(cf, cq, new ColumnVisibility(cv), val);
             }
         } else {
-            Value val = new Value(objToBytes(t.get(3)));
+            final Value val = new Value(objToBytes(t.get(3)));
             mut.put(cf, cq, val);
         }
 
         try {
             writer.write(tableName, mut);
-        } catch (InterruptedException e) {
+        } catch (final InterruptedException e) {
             throw new IOException(e);
         }
     }
 
-    private static Text objToText(Object o) {
+    private static Text objToText(final Object o) {
         return new Text(objToBytes(o));
     }
 
-    private static byte[] objToBytes(Object o) {
+    private static byte[] objToBytes(final Object o) {
         if (o instanceof String) {
-            String str = (String) o;
-            return str.getBytes();
+            final String str = (String) o;
+            return str.getBytes(StandardCharsets.UTF_8);
         } else if (o instanceof Long) {
-            Long l = (Long) o;
-            return l.toString().getBytes();
+            final Long l = (Long) o;
+            return l.toString().getBytes(StandardCharsets.UTF_8);
         } else if (o instanceof Integer) {
-            Integer l = (Integer) o;
-            return l.toString().getBytes();
+            final Integer l = (Integer) o;
+            return l.toString().getBytes(StandardCharsets.UTF_8);
         } else if (o instanceof Boolean) {
-            Boolean l = (Boolean) o;
-            return l.toString().getBytes();
+            final Boolean l = (Boolean) o;
+            return l.toString().getBytes(StandardCharsets.UTF_8);
         } else if (o instanceof Float) {
-            Float l = (Float) o;
-            return l.toString().getBytes();
+            final Float l = (Float) o;
+            return l.toString().getBytes(StandardCharsets.UTF_8);
         } else if (o instanceof Double) {
-            Double l = (Double) o;
-            return l.toString().getBytes();
+            final Double l = (Double) o;
+            return l.toString().getBytes(StandardCharsets.UTF_8);
         }
 
         // TODO: handle DataBag, Map<Object, Object>, and Tuple
@@ -363,19 +373,20 @@ public class AccumuloStorage extends LoadFunc implements StoreFuncInterface, Ord
         return ((DataByteArray) o).get();
     }
 
-    public void cleanupOnFailure(String failure, Job job) {
+    @Override
+    public void cleanupOnFailure(final String failure, final Job job) {
     }
 
     @Override
-    public WritableComparable<?> getSplitComparable(InputSplit inputSplit) throws IOException {
+    public WritableComparable<?> getSplitComparable(final InputSplit inputSplit) throws IOException {
         //cannot get access to the range directly
-        AccumuloInputFormat.RangeInputSplit rangeInputSplit = (AccumuloInputFormat.RangeInputSplit) inputSplit;
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        DataOutputStream out = new DataOutputStream(baos);
+        final AccumuloInputFormat.RangeInputSplit rangeInputSplit = (AccumuloInputFormat.RangeInputSplit) inputSplit;
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        final DataOutputStream out = new DataOutputStream(baos);
         rangeInputSplit.write(out);
         out.close();
-        DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
-        Range range = new Range();
+        final DataInputStream stream = new DataInputStream(new ByteArrayInputStream(baos.toByteArray()));
+        final Range range = new Range();
         range.readFields(stream);
         stream.close();
         return range;

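The objToBytes() branches above now all funnel through getBytes(StandardCharsets.UTF_8). Since the Long, Integer, Boolean, Float and Double branches each call toString() first, they could in principle collapse into one call; a hypothetical consolidation (not part of the patch, and it omits the DataByteArray fallback the real method keeps):

    import java.nio.charset.StandardCharsets;

    final class BytesUtil {
        // String.valueOf(o) yields the same text as the per-type
        // toString() calls in objToBytes(), so one branch covers
        // String, Long, Integer, Boolean, Float and Double alike.
        static byte[] toUtf8Bytes(Object o) {
            return String.valueOf(o).getBytes(StandardCharsets.UTF_8);
        }
    }
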
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/IndexWritingTool.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/IndexWritingTool.java b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/IndexWritingTool.java
index c1d426c..615c062 100644
--- a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/IndexWritingTool.java
+++ b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/IndexWritingTool.java
@@ -8,9 +8,9 @@ package org.apache.rya.accumulo.pig;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -23,8 +23,8 @@ package org.apache.rya.accumulo.pig;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
-import java.util.Set;
 import java.util.UUID;
 import java.util.regex.Pattern;
 
@@ -39,7 +39,6 @@ import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
 import org.apache.accumulo.core.client.security.tokens.PasswordToken;
 import org.apache.accumulo.core.data.Mutation;
 import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -48,7 +47,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.util.Tool;
@@ -56,8 +54,6 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Logger;
 import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.algebra.Projection;
-import org.openrdf.query.algebra.ProjectionElem;
-import org.openrdf.query.algebra.ProjectionElemList;
 import org.openrdf.query.algebra.TupleExpr;
 import org.openrdf.query.parser.sparql.SPARQLParser;
 
@@ -66,15 +62,15 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
 public class IndexWritingTool extends Configured implements Tool {
-    
+
     private static final String sparql_key = "SPARQL.VALUE";
     private static String cardCounter = "count";
-    
-    
-    public static void main(String[] args) throws Exception {
-        
+
+
+    public static void main(final String[] args) throws Exception {
+
       ToolRunner.run(new Configuration(), new IndexWritingTool(), args);
-       
+
     }
 
     @Override
@@ -90,12 +86,12 @@ public class IndexWritingTool extends Configured implements Tool {
         final String passStr = args[5];
         final String tablePrefix = args[6];
 
-        String sparql = FileUtils.readFileToString(new File(sparqlFile));
+        final String sparql = FileUtils.readFileToString(new File(sparqlFile));
 
-        Job job = new Job(getConf(), "Write HDFS Index to Accumulo");
+        final Job job = new Job(getConf(), "Write HDFS Index to Accumulo");
         job.setJarByClass(this.getClass());
 
-        Configuration jobConf = job.getConfiguration();
+        final Configuration jobConf = job.getConfiguration();
         jobConf.setBoolean("mapred.map.tasks.speculative.execution", false);
         setVarOrders(sparql, jobConf);
 
@@ -120,29 +116,29 @@ public class IndexWritingTool extends Configured implements Tool {
         setAccumuloOutput(instStr, zooStr, userStr, passStr, job, tableName);
 
         jobConf.set(sparql_key, sparql);
-        
-        int complete = job.waitForCompletion(true) ? 0 : -1;
+
+        final int complete = job.waitForCompletion(true) ? 0 : -1;
 
         if (complete == 0) {
-            
-            String[] varOrders = jobConf.getStrings("varOrders");
-            String orders = Joiner.on("\u0000").join(varOrders);
+
+            final String[] varOrders = jobConf.getStrings("varOrders");
+            final String orders = Joiner.on("\u0000").join(varOrders);
             Instance inst;
-            
+
             if (zooStr.equals("mock")) {
                 inst = new MockInstance(instStr);
             } else {
                 inst = new ZooKeeperInstance(instStr, zooStr);
             }
-           
-            Connector conn = inst.getConnector(userStr, passStr.getBytes());
-            BatchWriter bw = conn.createBatchWriter(tableName, 10, 5000, 1);
-
-            Counters counters = job.getCounters();
-            Counter c1 = counters.findCounter(cardCounter, cardCounter);
-            
-            Mutation m = new Mutation("~SPARQL");
-            Value v = new Value(sparql.getBytes());
+
+            final Connector conn = inst.getConnector(userStr, passStr.getBytes(StandardCharsets.UTF_8));
+            final BatchWriter bw = conn.createBatchWriter(tableName, 10, 5000, 1);
+
+            final Counters counters = job.getCounters();
+            final Counter c1 = counters.findCounter(cardCounter, cardCounter);
+
+            final Mutation m = new Mutation("~SPARQL");
+            final Value v = new Value(sparql.getBytes(StandardCharsets.UTF_8));
             m.put(new Text("" + c1.getValue()), new Text(orders), v);
             bw.addMutation(m);
 
@@ -155,52 +151,52 @@ public class IndexWritingTool extends Configured implements Tool {
 
 
     }
-    
-    
-    public void setVarOrders(String s, Configuration conf) throws MalformedQueryException {
 
-        SPARQLParser parser = new SPARQLParser();
-        TupleExpr query = parser.parseQuery(s, null).getTupleExpr();
 
-        List<String> projList = Lists.newArrayList(((Projection) query).getProjectionElemList().getTargetNames());
-        String projElems = Joiner.on(";").join(projList);
+    public void setVarOrders(final String s, final Configuration conf) throws MalformedQueryException {
+
+        final SPARQLParser parser = new SPARQLParser();
+        final TupleExpr query = parser.parseQuery(s, null).getTupleExpr();
+
+        final List<String> projList = Lists.newArrayList(((Projection) query).getProjectionElemList().getTargetNames());
+        final String projElems = Joiner.on(";").join(projList);
         conf.set("projElems", projElems);
 
-        Pattern splitPattern1 = Pattern.compile("\n");
-        Pattern splitPattern2 = Pattern.compile(",");
-        String[] lines = splitPattern1.split(s);
+        final Pattern splitPattern1 = Pattern.compile("\n");
+        final Pattern splitPattern2 = Pattern.compile(",");
+        final String[] lines = splitPattern1.split(s);
 
-        List<String> varOrders = Lists.newArrayList();
-        List<String> varOrderPos = Lists.newArrayList();
+        final List<String> varOrders = Lists.newArrayList();
+        final List<String> varOrderPos = Lists.newArrayList();
 
         int orderNum = 0;
-        int projSizeSq = projList.size()*projList.size();
-        
+        final int projSizeSq = projList.size()*projList.size();
+
         for (String t : lines) {
 
 
             if(orderNum > projSizeSq){
                 break;
             }
-            
+
             String[] order = null;
             if (t.startsWith("#prefix")) {
                 t = t.substring(7).trim();
                 order = splitPattern2.split(t, projList.size());
             }
 
-            
+
             String tempVarOrder = "";
             String tempVarOrderPos = "";
 
             if (order != null) {
-                for (String u : order) {
+                for (final String u : order) {
                     if (tempVarOrder.length() == 0) {
                         tempVarOrder = u.trim();
                     } else {
                         tempVarOrder = tempVarOrder + ";" + u.trim();
                     }
-                    int pos = projList.indexOf(u.trim());
+                    final int pos = projList.indexOf(u.trim());
                     if (pos < 0) {
                         throw new IllegalArgumentException("Invalid variable order!");
                     } else {
@@ -215,17 +211,17 @@ public class IndexWritingTool extends Configured implements Tool {
                 varOrders.add(tempVarOrder);
                 varOrderPos.add(tempVarOrderPos);
             }
-            
+
             if(tempVarOrder.length() > 0) {
                 orderNum++;
             }
 
         }
-        
+
         if(orderNum ==  0) {
             varOrders.add(projElems);
             String tempVarPos = "";
-            
+
             for(int i = 0; i < projList.size(); i++) {
                 if(i == 0) {
                     tempVarPos = Integer.toString(0);
@@ -234,29 +230,29 @@ public class IndexWritingTool extends Configured implements Tool {
                 }
             }
             varOrderPos.add(tempVarPos);
-            
+
         }
-        
-        String[] vOrders = varOrders.toArray(new String[varOrders.size()]);
-        String[] vOrderPos = varOrderPos.toArray(new String[varOrderPos.size()]);
-        
-        
-        
+
+        final String[] vOrders = varOrders.toArray(new String[varOrders.size()]);
+        final String[] vOrderPos = varOrderPos.toArray(new String[varOrderPos.size()]);
+
+
+
         conf.setStrings("varOrders", vOrders);
         conf.setStrings("varOrderPos", vOrderPos);
 
     }
-    
 
-    private static void setAccumuloOutput(String instStr, String zooStr, String userStr, String passStr, Job job, String tableName)
+
+    private static void setAccumuloOutput(final String instStr, final String zooStr, final String userStr, final String passStr, final Job job, final String tableName)
             throws AccumuloSecurityException {
 
-        AuthenticationToken token = new PasswordToken(passStr);
+        final AuthenticationToken token = new PasswordToken(passStr);
         AccumuloOutputFormat.setConnectorInfo(job, userStr, token);
         AccumuloOutputFormat.setDefaultTableName(job, tableName);
         AccumuloOutputFormat.setCreateTables(job, true);
         //TODO best way to do this?
-        
+
         if (zooStr.equals("mock")) {
             AccumuloOutputFormat.setMockInstance(job, instStr);
         } else {
@@ -270,41 +266,41 @@ public class IndexWritingTool extends Configured implements Tool {
     }
 
     public static class MyMapper extends Mapper<LongWritable, Text, Text, Mutation> {
-        
+
         private static final Logger logger = Logger.getLogger(MyMapper.class);
         final static Text EMPTY_TEXT = new Text();
         final static Value EMPTY_VALUE = new Value(new byte[] {});
         private String[] varOrderPos = null;
         private String[] projElem = null;
         private Pattern splitPattern = null;
-        private List<List<Integer>> varPositions = Lists.newArrayList();
-        
-        
+        private final List<List<Integer>> varPositions = Lists.newArrayList();
+
+
 
         @Override
-        protected void setup(Mapper<LongWritable, Text, Text, Mutation>.Context context) throws IOException,
+        protected void setup(final Mapper<LongWritable, Text, Text, Mutation>.Context context) throws IOException,
                 InterruptedException {
-           
-            Configuration conf = context.getConfiguration();
-            
+
+            final Configuration conf = context.getConfiguration();
+
             varOrderPos = conf.getStrings("varOrderPos");
             splitPattern = Pattern.compile("\t");
-            
-            for (String s : varOrderPos) {
-                String[] pos = s.split(";");
-                List<Integer> intPos = Lists.newArrayList();
+
+            for (final String s : varOrderPos) {
+                final String[] pos = s.split(";");
+                final List<Integer> intPos = Lists.newArrayList();
                 int i = 0;
-                for(String t: pos) {
+                for(final String t: pos) {
                     i = Integer.parseInt(t);
                     intPos.add(i);
                 }
-                
+
                 varPositions.add(intPos);
-                
+
             }
-            
+
             projElem = conf.get("projElems").split(";");
-            
+
             super.setup(context);
         }
 
@@ -314,17 +310,17 @@ public class IndexWritingTool extends Configured implements Tool {
 
 
         @Override
-        public void map(LongWritable key, Text value, Context output) throws IOException, InterruptedException {
+        public void map(final LongWritable key, final Text value, final Context output) throws IOException, InterruptedException {
+
+            final String[] result = splitPattern.split(value.toString());
 
-            String[] result = splitPattern.split(value.toString());
 
-           
-            for (List<Integer> list : varPositions) {
+            for (final List<Integer> list : varPositions) {
 
                 String values = "";
                 String vars = "";
 
-                for (Integer i : list) {
+                for (final Integer i : list) {
 
                     if (values.length() == 0) {
                         values = result[i];
@@ -335,7 +331,7 @@ public class IndexWritingTool extends Configured implements Tool {
                     }
 
                 }
-                Mutation m = new Mutation(new Text(values));
+                final Mutation m = new Mutation(new Text(values));
                 m.put(new Text(vars), EMPTY_TEXT, EMPTY_VALUE);
                 output.write(EMPTY_TEXT, m);
 

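IndexWritingTool now encodes both the connector password and the stored SPARQL text explicitly. Accumulo's Mutation and Value types wrap raw bytes, so the writer decides the encoding; a small sketch of the ~SPARQL row written above under the same UTF-8 convention (table setup and the BatchWriter are omitted, and the column values are illustrative):

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.data.Mutation;
    import org.apache.accumulo.core.data.Value;
    import org.apache.hadoop.io.Text;

    public class SparqlRowSketch {
        public static void main(String[] args) {
            String sparql = "SELECT ?s WHERE { ?s ?p ?o }";
            Mutation m = new Mutation("~SPARQL");
            // Value stores raw bytes; encoding explicitly keeps the
            // table contents identical no matter which JVM wrote them.
            Value v = new Value(sparql.getBytes(StandardCharsets.UTF_8));
            m.put(new Text("count"), new Text("varOrders"), v);
        }
    }
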
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/SparqlQueryPigEngine.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/SparqlQueryPigEngine.java b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/SparqlQueryPigEngine.java
index 782840c..5c2c52c 100644
--- a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/SparqlQueryPigEngine.java
+++ b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/SparqlQueryPigEngine.java
@@ -1,5 +1,31 @@
 package org.apache.rya.accumulo.pig;
 
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.accumulo.core.client.Connector;
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.pig.ExecType;
+import org.apache.pig.PigServer;
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.accumulo.AccumuloRdfEvalStatsDAO;
+import org.apache.rya.accumulo.AccumuloRyaDAO;
+import org.apache.rya.rdftriplestore.evaluation.QueryJoinOptimizer;
+import org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics;
+import org.apache.rya.rdftriplestore.inference.InferenceEngine;
+import org.apache.rya.rdftriplestore.inference.InverseOfVisitor;
+import org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
+import org.apache.rya.rdftriplestore.inference.TransitivePropertyVisitor;
+import org.openrdf.query.algebra.QueryRoot;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.QueryParser;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +34,9 @@ package org.apache.rya.accumulo.pig;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -22,31 +48,6 @@ package org.apache.rya.accumulo.pig;
 
 
 import com.google.common.base.Preconditions;
-import com.google.common.io.ByteStreams;
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.accumulo.AccumuloRdfEvalStatsDAO;
-import org.apache.rya.accumulo.AccumuloRyaDAO;
-import org.apache.rya.accumulo.pig.optimizer.SimilarVarJoinOptimizer;
-import org.apache.rya.rdftriplestore.evaluation.QueryJoinOptimizer;
-import org.apache.rya.rdftriplestore.evaluation.RdfCloudTripleStoreEvaluationStatistics;
-import org.apache.rya.rdftriplestore.inference.InferenceEngine;
-import org.apache.rya.rdftriplestore.inference.InverseOfVisitor;
-import org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
-import org.apache.rya.rdftriplestore.inference.TransitivePropertyVisitor;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.pig.ExecType;
-import org.apache.pig.PigServer;
-import org.openrdf.query.algebra.QueryRoot;
-import org.openrdf.query.parser.ParsedQuery;
-import org.openrdf.query.parser.QueryParser;
-import org.openrdf.query.parser.sparql.SPARQLParser;
-
-import java.io.ByteArrayInputStream;
-import java.io.FileInputStream;
-import java.io.IOException;
 
 /**
  * Created by IntelliJ IDEA.
@@ -74,7 +75,7 @@ public class SparqlQueryPigEngine {
         return conf;
     }
 
-    public void setConf(AccumuloRdfConfiguration conf) {
+    public void setConf(final AccumuloRdfConfiguration conf) {
         this.conf = conf;
     }
 
@@ -92,14 +93,14 @@ public class SparqlQueryPigEngine {
         }
 
         if (inference || stats) {
-            String instance = sparqlToPigTransformVisitor.getInstance();
-            String zoo = sparqlToPigTransformVisitor.getZk();
-            String user = sparqlToPigTransformVisitor.getUser();
-            String pass = sparqlToPigTransformVisitor.getPassword();
+            final String instance = sparqlToPigTransformVisitor.getInstance();
+            final String zoo = sparqlToPigTransformVisitor.getZk();
+            final String user = sparqlToPigTransformVisitor.getUser();
+            final String pass = sparqlToPigTransformVisitor.getPassword();
 
-            Connector connector = new ZooKeeperInstance(instance, zoo).getConnector(user, pass.getBytes());
+            final Connector connector = new ZooKeeperInstance(instance, zoo).getConnector(user, pass.getBytes(StandardCharsets.UTF_8));
 
-            String tablePrefix = sparqlToPigTransformVisitor.getTablePrefix();
+            final String tablePrefix = sparqlToPigTransformVisitor.getTablePrefix();
             conf.setTablePrefix(tablePrefix);
             if (inference) {
                 logger.info("Using inference");
@@ -147,28 +148,28 @@ public class SparqlQueryPigEngine {
      * @param hdfsSaveLocation to save the execution
      * @throws java.io.IOException
      */
-    public void runQuery(String sparql, String hdfsSaveLocation) throws IOException {
+    public void runQuery(final String sparql, final String hdfsSaveLocation) throws IOException {
         Preconditions.checkNotNull(sparql, "Sparql query cannot be null");
         Preconditions.checkNotNull(hdfsSaveLocation, "Hdfs save location cannot be null");
         logger.info("Running query[" + sparql + "]\n to Location[" + hdfsSaveLocation + "]");
         pigServer.deleteFile(hdfsSaveLocation);
         try {
-            String pigScript = generatePigScript(sparql);
+            final String pigScript = generatePigScript(sparql);
             if (logger.isDebugEnabled()) {
                 logger.debug("Pig script [" + pigScript + "]");
             }
-            pigServer.registerScript(new ByteArrayInputStream(pigScript.getBytes()));
+            pigServer.registerScript(new ByteArrayInputStream(pigScript.getBytes(StandardCharsets.UTF_8)));
             pigServer.store("PROJ", hdfsSaveLocation); //TODO: Make this a constant
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new IOException(e);
         }
     }
 
-    public String generatePigScript(String sparql) throws Exception {
+    public String generatePigScript(final String sparql) throws Exception {
         Preconditions.checkNotNull(sparql, "Sparql query cannot be null");
-        QueryParser parser = new SPARQLParser();
-        ParsedQuery parsedQuery = parser.parseQuery(sparql, null);
-        QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr());
+        final QueryParser parser = new SPARQLParser();
+        final ParsedQuery parsedQuery = parser.parseQuery(sparql, null);
+        final QueryRoot tupleExpr = new QueryRoot(parsedQuery.getTupleExpr());
 
 //        SimilarVarJoinOptimizer similarVarJoinOptimizer = new SimilarVarJoinOptimizer();
 //        similarVarJoinOptimizer.optimize(tupleExpr, null, null);
@@ -189,31 +190,31 @@ public class SparqlQueryPigEngine {
     }
 
 
-    public static void main(String[] args) {
+    public static void main(final String[] args) {
         try {
             Preconditions.checkArgument(args.length == 7, "Usage: java -cp <jar>:$PIG_LIB <class> sparqlFile hdfsSaveLocation cbinstance cbzk cbuser cbpassword rdfTablePrefix.\n " +
                     "Sample command: java -cp java -cp cloudbase.pig-2.0.0-SNAPSHOT-shaded.jar:/usr/local/hadoop-etc/hadoop-0.20.2/hadoop-0.20.2-core.jar:/srv_old/hdfs-tmp/pig/pig-0.9.2/pig-0.9.2.jar:$HADOOP_HOME/conf org.apache.rya.accumulo.pig.SparqlQueryPigEngine " +
                     "tstSpqrl.query temp/engineTest stratus stratus13:2181 root password l_");
-            String sparql = new String(ByteStreams.toByteArray(new FileInputStream(args[0])));
-            String hdfsSaveLocation = args[1];
-            SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
+            final String sparql = FileUtils.readFileToString(new File(args[0]), StandardCharsets.UTF_8);
+            final String hdfsSaveLocation = args[1];
+            final SparqlToPigTransformVisitor visitor = new SparqlToPigTransformVisitor();
             visitor.setTablePrefix(args[6]);
             visitor.setInstance(args[2]);
             visitor.setZk(args[3]);
             visitor.setUser(args[4]);
             visitor.setPassword(args[5]);
 
-            SparqlQueryPigEngine engine = new SparqlQueryPigEngine();
+            final SparqlQueryPigEngine engine = new SparqlQueryPigEngine();
             engine.setSparqlToPigTransformVisitor(visitor);
             engine.setInference(false);
             engine.setStats(false);
-            
+
             engine.init();
 
             engine.runQuery(sparql, hdfsSaveLocation);
 
             engine.destroy();
-        } catch (Exception e) {
+        } catch (final Exception e) {
             e.printStackTrace();
         }
     }
@@ -222,7 +223,7 @@ public class SparqlQueryPigEngine {
         return hadoopDir;
     }
 
-    public void setHadoopDir(String hadoopDir) {
+    public void setHadoopDir(final String hadoopDir) {
         this.hadoopDir = hadoopDir;
     }
 
@@ -230,7 +231,7 @@ public class SparqlQueryPigEngine {
         return pigServer;
     }
 
-    public void setPigServer(PigServer pigServer) {
+    public void setPigServer(final PigServer pigServer) {
         this.pigServer = pigServer;
     }
 
@@ -238,7 +239,7 @@ public class SparqlQueryPigEngine {
         return execType;
     }
 
-    public void setExecType(ExecType execType) {
+    public void setExecType(final ExecType execType) {
         this.execType = execType;
     }
 
@@ -246,7 +247,7 @@ public class SparqlQueryPigEngine {
         return inference;
     }
 
-    public void setInference(boolean inference) {
+    public void setInference(final boolean inference) {
         this.inference = inference;
     }
 
@@ -254,7 +255,7 @@ public class SparqlQueryPigEngine {
         return stats;
     }
 
-    public void setStats(boolean stats) {
+    public void setStats(final boolean stats) {
         this.stats = stats;
     }
 
@@ -262,7 +263,7 @@ public class SparqlQueryPigEngine {
         return sparqlToPigTransformVisitor;
     }
 
-    public void setSparqlToPigTransformVisitor(SparqlToPigTransformVisitor sparqlToPigTransformVisitor) {
+    public void setSparqlToPigTransformVisitor(final SparqlToPigTransformVisitor sparqlToPigTransformVisitor) {
         this.sparqlToPigTransformVisitor = sparqlToPigTransformVisitor;
     }
 }

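main() above also swaps Guava's ByteStreams.toByteArray(new FileInputStream(...)) plus the default-charset String constructor for commons-io's charset-aware file read. A sketch of the replacement call, assuming a commons-io release where the Charset overload exists (the no-charset overload is deprecated in later releases):

    import java.io.File;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.FileUtils;

    public class ReadSparqlFile {
        public static void main(String[] args) throws Exception {
            // One call replaces stream + toByteArray + new String(...),
            // and never consults the JVM default charset.
            String sparql = FileUtils.readFileToString(
                    new File(args[0]), StandardCharsets.UTF_8);
            System.out.println(sparql);
        }
    }
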
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/StatementPatternStorage.java
----------------------------------------------------------------------
diff --git a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/StatementPatternStorage.java b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/StatementPatternStorage.java
index 974888b..93266df 100644
--- a/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/StatementPatternStorage.java
+++ b/pig/accumulo.pig/src/main/java/org/apache/rya/accumulo/pig/StatementPatternStorage.java
@@ -8,9 +8,9 @@ package org.apache.rya.accumulo.pig;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -22,10 +22,21 @@ package org.apache.rya.accumulo.pig;
 
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.data.Key;
+import org.apache.accumulo.core.data.Range;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.pig.data.Tuple;
+import org.apache.pig.data.TupleFactory;
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.AccumuloRyaDAO;
 import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
@@ -41,17 +52,6 @@ import org.apache.rya.api.resolver.RyaTripleContext;
 import org.apache.rya.api.resolver.triple.TripleRow;
 import org.apache.rya.rdftriplestore.inference.InferenceEngine;
 import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
-
-import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Range;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.pig.data.Tuple;
-import org.apache.pig.data.TupleFactory;
 import org.openrdf.model.Resource;
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;
@@ -94,7 +94,7 @@ public class StatementPatternStorage extends AccumuloStorage {
     	else {
     		ryaContext = RyaTripleContext.getInstance(new AccumuloRdfConfiguration());
     	}
-    	
+
     }
 
     private Value getValue(Var subjectVar) {
@@ -115,8 +115,9 @@ public class StatementPatternStorage extends AccumuloStorage {
             addInferredRanges(table, job);
         }
 
-        if (layout == null || ranges.size() == 0)
+        if (layout == null || ranges.size() == 0) {
             throw new IllegalArgumentException("Range and/or layout is null. Check the query");
+        }
         table = RdfCloudTripleStoreUtils.layoutPrefixToTable(layout, table);
         tableName = new Text(table);
     }
@@ -195,8 +196,9 @@ public class StatementPatternStorage extends AccumuloStorage {
         RyaURI predicate_rya = RdfToRyaConversions.convertURI((URI) p_v);
         RyaType object_rya = RdfToRyaConversions.convertValue(o_v);
         TriplePatternStrategy strategy = ryaContext.retrieveStrategy(subject_rya, predicate_rya, object_rya, null);
-        if (strategy == null)
+        if (strategy == null) {
             return new RdfCloudTripleStoreUtils.CustomEntry<TABLE_LAYOUT, Range>(TABLE_LAYOUT.SPO, new Range());
+        }
         Map.Entry<TABLE_LAYOUT, ByteRange> entry = strategy.defineRange(subject_rya, predicate_rya, object_rya, null, null);
         ByteRange byteRange = entry.getValue();
         return new RdfCloudTripleStoreUtils.CustomEntry<org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT, Range>(
@@ -215,9 +217,9 @@ public class StatementPatternStorage extends AccumuloStorage {
             ryaDAO.setConf(rdfConf);
             try {
                 if (!mock) {
-                    ryaDAO.setConnector(new ZooKeeperInstance(inst, zookeepers).getConnector(user, password.getBytes()));
+                    ryaDAO.setConnector(new ZooKeeperInstance(inst, zookeepers).getConnector(user, password.getBytes(StandardCharsets.UTF_8)));
                 } else {
-                    ryaDAO.setConnector(new MockInstance(inst).getConnector(user, password.getBytes()));
+                    ryaDAO.setConnector(new MockInstance(inst).getConnector(user, password.getBytes(StandardCharsets.UTF_8)));
                 }
             } catch (Exception e) {
                 throw new IOException(e);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3dc7c68..a6415ad 100644
--- a/pom.xml
+++ b/pom.xml
@@ -136,7 +136,6 @@ under the License.
 
         <jsr305.version>1.3.9-1</jsr305.version>
         <jcip.version>1.0-1</jcip.version>
-        <findbugs.plugin.version>3.0.4</findbugs.plugin.version>
         <kafka.version>0.10.0.1</kafka.version>
         <jopt-simple.version>4.9</jopt-simple.version>
         
@@ -682,7 +681,6 @@ under the License.
                 <groupId>junit</groupId>
                 <artifactId>junit</artifactId>
                 <version>${junit.version}</version>
-                <scope>test</scope>
             </dependency>
             <dependency>
                 <groupId>org.apache.mrunit</groupId>
@@ -819,7 +817,6 @@ under the License.
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-surefire-plugin</artifactId>
                     <configuration>
-                        <argLine>-Dfile.encoding=${project.build.sourceEncoding}</argLine>
                         <systemPropertyVariables>
                             <java.io.tmpdir>${project.build.directory}</java.io.tmpdir>
                         </systemPropertyVariables>
@@ -957,6 +954,15 @@ under the License.
                     <artifactId>license-maven-plugin</artifactId>
                     <version>3.0</version>
                 </plugin>
+                <plugin>
+                    <groupId>org.codehaus.mojo</groupId>
+                    <artifactId>findbugs-maven-plugin</artifactId>
+                    <version>3.0.5</version>
+                    <configuration>
+                        <effort>Max</effort>
+                        <threshold>Low</threshold>
+                    </configuration>
+                </plugin>
             </plugins>
         </pluginManagement>
 
@@ -1030,6 +1036,23 @@ under the License.
                     </excludes>
                 </configuration>
             </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>findbugs-maven-plugin</artifactId>
+                <configuration>
+                    <failOnError>true</failOnError>   <!-- These are serious defects that aren't allowed in Rya.  Fail the build. -->
+                    <visitors>DefaultEncodingDetector</visitors>  <!--  Only specify detectors that should not detect any errors. -->
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>analyze-compile</id>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
 
@@ -1038,7 +1061,6 @@ under the License.
             <plugin>
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>findbugs-maven-plugin</artifactId>
-                <version>${findbugs.plugin.version}</version>
             </plugin>
         </plugins>
     </reporting>

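The build now enforces the fix: findbugs-maven-plugin runs at compile time with only DefaultEncodingDetector enabled and failOnError set, while the surefire -Dfile.encoding argLine that used to paper over the problem is removed. Assuming the detector reports the standard DM_DEFAULT_ENCODING pattern, the contrast it checks for looks like this (illustrative class, not from the patch):

    import java.nio.charset.StandardCharsets;

    public class EncodingCheck {
        byte[] bad(String s) {
            return s.getBytes();                       // flagged: DM_DEFAULT_ENCODING, fails the build
        }

        byte[] good(String s) {
            return s.getBytes(StandardCharsets.UTF_8); // passes the detector
        }
    }
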
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
----------------------------------------------------------------------
diff --git a/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java b/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
index bf655ce..921acaa 100644
--- a/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
+++ b/sail/src/main/java/org/apache/rya/rdftriplestore/RdfCloudTripleStoreConnection.java
@@ -1,5 +1,3 @@
-package org.apache.rya.rdftriplestore;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,13 +16,13 @@ package org.apache.rya.rdftriplestore;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.rdftriplestore;
 
 import static com.google.common.base.Preconditions.checkArgument;
 import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -156,7 +154,7 @@ public class RdfCloudTripleStoreConnection extends SailConnectionBase {
                                         final Value object, final Resource... contexts) throws SailException {
         try {
             final String cv_s = conf.getCv();
-            final byte[] cv = cv_s == null ? null : cv_s.getBytes();
+            final byte[] cv = cv_s == null ? null : cv_s.getBytes(StandardCharsets.UTF_8);
             final List<RyaStatement> ryaStatements = new ArrayList<>();
             if (contexts != null && contexts.length > 0) {
                 for (final Resource context : contexts) {


[09/11] incubator-rya git commit: RYA-397 Renamed MockMongoSingleton/Factory to Embedded. Closes #238.

Posted by ca...@apache.org.
RYA-397 Renamed MockMongoSingleton/Factory to Embedded. Closes #238.

Since these classes create and use an embedded MongoDB instance rather than
a mock, they were renamed to reflect what they actually use.



Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/26e9214c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/26e9214c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/26e9214c

Branch: refs/heads/master
Commit: 26e9214ce745bc23f8c1f44af2e4235395069aea
Parents: 244b870
Author: Andrew Smith <sm...@gmail.com>
Authored: Thu Oct 5 14:45:09 2017 -0400
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:55:04 2017 -0400

----------------------------------------------------------------------
 .../rya/mongodb/EmbeddedMongoFactory.java       | 97 ++++++++++++++++++++
 .../apache/rya/mongodb/MockMongoFactory.java    | 97 --------------------
 .../rya/mongodb/EmbeddedMongoSingleton.java     | 82 +++++++++++++++++
 .../apache/rya/mongodb/MockMongoSingleton.java  | 82 -----------------
 .../apache/rya/mongodb/MongoRyaTestBase.java    |  4 +-
 .../org/apache/rya/mongodb/MongoTestBase.java   |  2 +-
 .../src/main/java/InferenceExamples.java        |  6 +-
 .../src/main/java/MongoRyaDirectExample.java    |  6 +-
 .../geoExamples/RyaMongoGeoDirectExample.java   |  6 +-
 .../indexing/geotemporal/mongo/MongoITBase.java |  4 +-
 .../indexing/mongo/MongoIndexerDeleteIT.java    |  4 +-
 11 files changed, 195 insertions(+), 195 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/EmbeddedMongoFactory.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/EmbeddedMongoFactory.java b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/EmbeddedMongoFactory.java
new file mode 100644
index 0000000..f023739
--- /dev/null
+++ b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/EmbeddedMongoFactory.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.mongodb;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.net.UnknownHostException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.mongodb.MongoClient;
+import com.mongodb.MongoException;
+import com.mongodb.ServerAddress;
+
+import de.flapdoodle.embed.mongo.Command;
+import de.flapdoodle.embed.mongo.MongodExecutable;
+import de.flapdoodle.embed.mongo.MongodProcess;
+import de.flapdoodle.embed.mongo.MongodStarter;
+import de.flapdoodle.embed.mongo.config.IMongodConfig;
+import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
+import de.flapdoodle.embed.mongo.config.Net;
+import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder;
+import de.flapdoodle.embed.mongo.distribution.IFeatureAwareVersion;
+import de.flapdoodle.embed.mongo.distribution.Version;
+
+public class EmbeddedMongoFactory {
+    private static Logger logger = LoggerFactory.getLogger(EmbeddedMongoFactory.class.getName());
+
+    public static EmbeddedMongoFactory newFactory() throws IOException {
+        return EmbeddedMongoFactory.with(Version.Main.PRODUCTION);
+    }
+    
+    public static EmbeddedMongoFactory with(final IFeatureAwareVersion version) throws IOException {
+        return new EmbeddedMongoFactory(version);
+    }
+
+    private final MongodExecutable mongodExecutable;
+    private final MongodProcess mongodProcess;
+
+    /**
+     * Create the testing utility using the specified version of MongoDB.
+     * 
+     * @param version
+     *            version of MongoDB.
+     */
+    private EmbeddedMongoFactory(final IFeatureAwareVersion version) throws IOException {
+        final MongodStarter runtime = MongodStarter.getInstance(new RuntimeConfigBuilder().defaultsWithLogger(Command.MongoD, logger).build());
+        mongodExecutable = runtime.prepare(newMongodConfig(version));
+        mongodProcess = mongodExecutable.start();
+    }
+
+    private IMongodConfig newMongodConfig(final IFeatureAwareVersion version) throws UnknownHostException, IOException {
+        Net net = new Net(findRandomOpenPortOnAllLocalInterfaces(), false);
+        return new MongodConfigBuilder().version(version).net(net).build();
+    }
+
+    private int findRandomOpenPortOnAllLocalInterfaces() throws IOException {
+        try (ServerSocket socket = new ServerSocket(0);) {
+            return socket.getLocalPort();
+        }
+    }
+
+    /**
+     * Creates a new Mongo connection.
+     * 
+     * @throws MongoException
+     * @throws UnknownHostException
+     */
+    public MongoClient newMongoClient() throws UnknownHostException, MongoException {
+        return new MongoClient(new ServerAddress(mongodProcess.getConfig().net().getServerAddress(), mongodProcess.getConfig().net().getPort()));
+    }
+
+    /**
+     * Cleans up the resources created by the utility.
+     */
+    public void shutdown() {
+        mongodProcess.stop();
+        mongodExecutable.stop();
+    }
+}

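EmbeddedMongoFactory keeps the same API as the class it replaces: newFactory() boots a mongod on a random open port, newMongoClient() connects to it, and shutdown() tears both down. A usage sketch for a test, assuming the mongo-java-driver 3.x MongoClient shown in the imports above:

    import com.mongodb.MongoClient;

    import org.apache.rya.mongodb.EmbeddedMongoFactory;

    public class EmbeddedMongoExample {
        public static void main(String[] args) throws Exception {
            EmbeddedMongoFactory factory = EmbeddedMongoFactory.newFactory();
            MongoClient client = factory.newMongoClient();
            try {
                // The embedded instance starts empty but fully functional.
                System.out.println(client.listDatabaseNames().first());
            } finally {
                client.close();
                factory.shutdown(); // stops the mongod process and executable
            }
        }
    }
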
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MockMongoFactory.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MockMongoFactory.java b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MockMongoFactory.java
deleted file mode 100644
index baafcea..0000000
--- a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/MockMongoFactory.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.mongodb;
-
-import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.UnknownHostException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.mongodb.MongoClient;
-import com.mongodb.MongoException;
-import com.mongodb.ServerAddress;
-
-import de.flapdoodle.embed.mongo.Command;
-import de.flapdoodle.embed.mongo.MongodExecutable;
-import de.flapdoodle.embed.mongo.MongodProcess;
-import de.flapdoodle.embed.mongo.MongodStarter;
-import de.flapdoodle.embed.mongo.config.IMongodConfig;
-import de.flapdoodle.embed.mongo.config.MongodConfigBuilder;
-import de.flapdoodle.embed.mongo.config.Net;
-import de.flapdoodle.embed.mongo.config.RuntimeConfigBuilder;
-import de.flapdoodle.embed.mongo.distribution.IFeatureAwareVersion;
-import de.flapdoodle.embed.mongo.distribution.Version;
-
-public class MockMongoFactory {
-    private static Logger logger = LoggerFactory.getLogger(MockMongoFactory.class.getName());
-
-    public static MockMongoFactory newFactory() throws IOException {
-        return MockMongoFactory.with(Version.Main.PRODUCTION);
-    }
-    
-    public static MockMongoFactory with(final IFeatureAwareVersion version) throws IOException {
-        return new MockMongoFactory(version);
-    }
-
-    private final MongodExecutable mongodExecutable;
-    private final MongodProcess mongodProcess;
-
-    /**
-     * Create the testing utility using the specified version of MongoDB.
-     * 
-     * @param version
-     *            version of MongoDB.
-     */
-    private MockMongoFactory(final IFeatureAwareVersion version) throws IOException {
-        final MongodStarter runtime = MongodStarter.getInstance(new RuntimeConfigBuilder().defaultsWithLogger(Command.MongoD, logger).build());
-        mongodExecutable = runtime.prepare(newMongodConfig(version));
-        mongodProcess = mongodExecutable.start();
-    }
-
-    private IMongodConfig newMongodConfig(final IFeatureAwareVersion version) throws UnknownHostException, IOException {
-        Net net = new Net(findRandomOpenPortOnAllLocalInterfaces(), false);
-        return new MongodConfigBuilder().version(version).net(net).build();
-    }
-
-    private int findRandomOpenPortOnAllLocalInterfaces() throws IOException {
-        try (ServerSocket socket = new ServerSocket(0);) {
-            return socket.getLocalPort();
-        }
-    }
-
-    /**
-     * Creates a new Mongo connection.
-     * 
-     * @throws MongoException
-     * @throws UnknownHostException
-     */
-    public MongoClient newMongoClient() throws UnknownHostException, MongoException {
-        return new MongoClient(new ServerAddress(mongodProcess.getConfig().net().getServerAddress(), mongodProcess.getConfig().net().getPort()));
-    }
-
-    /**
-     * Cleans up the resources created by the utility.
-     */
-    public void shutdown() {
-        mongodProcess.stop();
-        mongodExecutable.stop();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/EmbeddedMongoSingleton.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/EmbeddedMongoSingleton.java b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/EmbeddedMongoSingleton.java
new file mode 100644
index 0000000..e068405
--- /dev/null
+++ b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/EmbeddedMongoSingleton.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.mongodb;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.mongodb.MongoClient;
+
+/**
+ * To be used for tests. Creates a singleton {@link MongoClient} to be used
+ * throughout all of the MongoDB related tests. Without the singleton, the
+ * embedded mongo factory ends up orphaning processes, consuming resources.
+ */
+public class EmbeddedMongoSingleton {
+    public static MongoClient getInstance() {
+        return InstanceHolder.SINGLETON.instance;
+    }
+
+    private EmbeddedMongoSingleton() {
+        // hiding implicit default constructor
+    }
+
+    private enum InstanceHolder {
+
+        SINGLETON;
+
+        private final Logger log;
+        private MongoClient instance;
+
+        InstanceHolder() {
+            log = LoggerFactory.getLogger(EmbeddedMongoSingleton.class);
+            instance = null;
+            try {
+                instance = EmbeddedMongoFactory.newFactory().newMongoClient();
+                // JUnit does not have an overall lifecycle event for tearing down
+                // this kind of resource, but shutdown hooks work alright in practice
+                // since this should only be used during testing
+
+                // The only other alternative for lifecycle management is to use a
+                // suite lifecycle to enclose the tests that need this resource.
+                // In practice this becomes unwieldy.
+                Runtime.getRuntime().addShutdownHook(new Thread() {
+                    @Override
+                    public void run() {
+                        try {
+                            instance.close();
+                        } catch (final Throwable t) {
+                            // logging frameworks will likely be shut down
+                            t.printStackTrace(System.err);
+                        }
+                    }
+                });
+
+            } catch (final IOException e) {
+                log.error("Unexpected error while starting mongo client", e);
+            } catch (final Throwable e) {
+                // catching throwable because failure to construct an enum
+                // instance will lead to another error being thrown downstream
+                log.error("Unexpected throwable while starting mongo client", e);
+            }
+        }
+    }
+}
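
A sketch of how a test would lean on this singleton; getInstance() is the only API taken from the class, and the test body is illustrative:

    import static org.junit.Assert.assertSame;

    import com.mongodb.MongoClient;
    import org.junit.Test;

    public class EmbeddedMongoSingletonExample {
        @Test
        public void sharesOneClientAcrossTests() {
            // Both calls return the same process-wide client, so test classes
            // do not each spawn (and potentially orphan) a mongod process.
            final MongoClient a = EmbeddedMongoSingleton.getInstance();
            final MongoClient b = EmbeddedMongoSingleton.getInstance();
            assertSame(a, b);
            // Do not close the client in a test; the shutdown hook owns it.
        }
    }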

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MockMongoSingleton.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MockMongoSingleton.java b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MockMongoSingleton.java
deleted file mode 100644
index c7860af..0000000
--- a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MockMongoSingleton.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.mongodb;
-
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.mongodb.MongoClient;
-
-/**
- * To be used for tests. Creates a singleton {@link MongoClient} to be used
- * throughout all of the MongoDB related tests. Without the singleton, the
- * embedded mongo factory ends up orphaning processes, consuming resources.
- */
-public class MockMongoSingleton {
-    public static MongoClient getInstance() {
-        return InstanceHolder.SINGLETON.instance;
-    }
-
-    private MockMongoSingleton() {
-        // hiding implicit default constructor
-    }
-
-    private enum InstanceHolder {
-
-        SINGLETON;
-
-        private final Logger log;
-        private MongoClient instance;
-
-        InstanceHolder() {
-            log = LoggerFactory.getLogger(MockMongoSingleton.class);
-            instance = null;
-            try {
-                instance = MockMongoFactory.newFactory().newMongoClient();
-                // JUnit does not have an overall lifecycle event for tearing down
-                // this kind of resource, but shutdown hooks work alright in practice
-                // since this should only be used during testing
-
-                // The only other alternative for lifecycle management is to use a
-                // suite lifecycle to enclose the tests that need this resource.
-                // In practice this becomes unwieldy.
-                Runtime.getRuntime().addShutdownHook(new Thread() {
-                    @Override
-                    public void run() {
-                        try {
-                            instance.close();
-                        } catch (final Throwable t) {
-                            // logging frameworks will likely be shut down
-                            t.printStackTrace(System.err);
-                        }
-                    }
-                });
-
-            } catch (final IOException e) {
-                log.error("Unexpected error while starting mongo client", e);
-            } catch (final Throwable e) {
-                // catching throwable because failure to construct an enum
-                // instance will lead to another error being thrown downstream
-                log.error("Unexpected throwable while starting mongo client", e);
-            }
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoRyaTestBase.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoRyaTestBase.java b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoRyaTestBase.java
index b0a4161..3d95818 100644
--- a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoRyaTestBase.java
+++ b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoRyaTestBase.java
@@ -35,13 +35,13 @@ public class MongoRyaTestBase {
 
     private static final AtomicInteger db = new AtomicInteger(1);
 
-    protected static MockMongoFactory testsFactory;
+    protected static EmbeddedMongoFactory testsFactory;
     protected MongoClient mongoClient;
     private int currentTestDb = -1;
 
     @BeforeClass()
     public static void beforeClass() throws Exception {
-        testsFactory = MockMongoFactory.with(Version.Main.PRODUCTION);
+        testsFactory = EmbeddedMongoFactory.with(Version.Main.PRODUCTION);
     }
 
     @Before

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoTestBase.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoTestBase.java b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoTestBase.java
index e4578f2..ffd4fd9 100644
--- a/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoTestBase.java
+++ b/dao/mongodb.rya/src/test/java/org/apache/rya/mongodb/MongoTestBase.java
@@ -39,7 +39,7 @@ public class MongoTestBase {
         conf.setBoolean("sc.useMongo", true);
         conf.setTablePrefix("test_");
         conf.setMongoDBName("testDB");
-        mongoClient = MockMongoSingleton.getInstance();
+        mongoClient = EmbeddedMongoSingleton.getInstance();
         conf.setMongoClient(mongoClient);
     }
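
The same wiring works for any test configuration that should reuse the shared client. A sketch, assuming conf here is a MongoDBRdfConfiguration (its declared type is not visible in this hunk) and using only the setters that appear above:

    final MongoDBRdfConfiguration conf = new MongoDBRdfConfiguration(); // assumed type
    conf.setBoolean("sc.useMongo", true);   // route Rya through the Mongo DAO
    conf.setTablePrefix("test_");           // isolate test collections
    conf.setMongoDBName("testDB");
    conf.setMongoClient(EmbeddedMongoSingleton.getInstance()); // reuse, never re-spawn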
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/extras/indexingExample/src/main/java/InferenceExamples.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/InferenceExamples.java b/extras/indexingExample/src/main/java/InferenceExamples.java
index 474d7b0..d1d9dc4 100644
--- a/extras/indexingExample/src/main/java/InferenceExamples.java
+++ b/extras/indexingExample/src/main/java/InferenceExamples.java
@@ -30,7 +30,7 @@ import org.apache.log4j.PatternLayout;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder;
-import org.apache.rya.mongodb.MockMongoFactory;
+import org.apache.rya.mongodb.EmbeddedMongoFactory;
 import org.apache.rya.mongodb.MongoConnectorFactory;
 import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
 import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
@@ -88,7 +88,7 @@ public class InferenceExamples {
 	        rootLogger.setLevel(Level.INFO);
 	        // Filter out noisy messages from the following classes.
 	        Logger.getLogger(ClientCnxn.class).setLevel(Level.OFF);
-	        Logger.getLogger(MockMongoFactory.class).setLevel(Level.OFF);
+	        Logger.getLogger(EmbeddedMongoFactory.class).setLevel(Level.OFF);
 	    }
 
 	    public static void main(final String[] args) throws Exception {
@@ -154,7 +154,7 @@ public class InferenceExamples {
 		            .setUseMockMongo(USE_EMBEDDED_MONGO).setUseInference(true).setAuths("U");
 	        
 	        if (USE_EMBEDDED_MONGO) {
-	            final MongoClient c = MockMongoFactory.newFactory().newMongoClient();
+	            final MongoClient c = EmbeddedMongoFactory.newFactory().newMongoClient();
 	            final ServerAddress address = c.getAddress();
 	            final String url = address.getHost();
 	            final String port = Integer.toString(address.getPort());
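
This hunk shows the recurring pattern for pointing a config builder at the embedded instance: create the client, then read the bound endpoint back off of it. A condensed sketch; the final builder call is an assumption, since the hunk ends before url and port are consumed:

    final MongoClient c = EmbeddedMongoFactory.newFactory().newMongoClient();
    final ServerAddress address = c.getAddress();
    final String url = address.getHost();
    final String port = Integer.toString(address.getPort());
    // Assumed hand-off to the builder, e.g.:
    // builder.setMongoHost(url).setMongoPort(port);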

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
index 4883427..f8927d4 100644
--- a/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
+++ b/extras/indexingExample/src/main/java/MongoRyaDirectExample.java
@@ -30,7 +30,7 @@ import org.apache.log4j.PatternLayout;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder;
-import org.apache.rya.mongodb.MockMongoFactory;
+import org.apache.rya.mongodb.EmbeddedMongoFactory;
 import org.apache.rya.mongodb.MongoConnectorFactory;
 import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
 import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
@@ -91,7 +91,7 @@ public class MongoRyaDirectExample {
         rootLogger.setLevel(Level.INFO);
         // Filter out noisy messages from the following classes.
         Logger.getLogger(ClientCnxn.class).setLevel(Level.OFF);
-        Logger.getLogger(MockMongoFactory.class).setLevel(Level.OFF);
+        Logger.getLogger(EmbeddedMongoFactory.class).setLevel(Level.OFF);
     }
 
     public static void main(final String[] args) throws Exception {
@@ -297,7 +297,7 @@ public class MongoRyaDirectExample {
             .setUseMockMongo(USE_MOCK).setUseInference(USE_INFER).setAuths("U");
 
         if (USE_MOCK) {
-            final MongoClient c = MockMongoFactory.newFactory().newMongoClient();
+            final MongoClient c = EmbeddedMongoFactory.newFactory().newMongoClient();
             final ServerAddress address = c.getAddress();
             final String url = address.getHost();
             final String port = Integer.toString(address.getPort());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/extras/rya.geoindexing/geo.mongo/src/main/java/org/apache/rya/indexing/geoExamples/RyaMongoGeoDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/geo.mongo/src/main/java/org/apache/rya/indexing/geoExamples/RyaMongoGeoDirectExample.java b/extras/rya.geoindexing/geo.mongo/src/main/java/org/apache/rya/indexing/geoExamples/RyaMongoGeoDirectExample.java
index e42ce07..ede3f98 100644
--- a/extras/rya.geoindexing/geo.mongo/src/main/java/org/apache/rya/indexing/geoExamples/RyaMongoGeoDirectExample.java
+++ b/extras/rya.geoindexing/geo.mongo/src/main/java/org/apache/rya/indexing/geoExamples/RyaMongoGeoDirectExample.java
@@ -29,7 +29,7 @@ import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.accumulo.geo.OptionalConfigUtils;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder;
-import org.apache.rya.mongodb.MockMongoFactory;
+import org.apache.rya.mongodb.EmbeddedMongoFactory;
 import org.apache.rya.mongodb.MongoConnectorFactory;
 import org.openrdf.model.vocabulary.RDFS;
 import org.openrdf.query.BindingSet;
@@ -172,14 +172,14 @@ public class RyaMongoGeoDirectExample {
         }
     }
 
-    private static MockMongoFactory mock = null;
+    private static EmbeddedMongoFactory mock = null;
     private static Configuration getConf() throws IOException {
 
     	MongoDBIndexingConfigBuilder builder = MongoIndexingConfiguration.builder()
     		.setUseMockMongo(USE_MOCK).setUseInference(USE_INFER).setAuths("U");
 
         if (USE_MOCK) {
-            mock = MockMongoFactory.newFactory();
+            mock = EmbeddedMongoFactory.newFactory();
             MongoClient c = mock.newMongoClient();
             ServerAddress address = c.getAddress();
             String url = address.getHost();

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/geotemporal/mongo/MongoITBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/geotemporal/mongo/MongoITBase.java b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/geotemporal/mongo/MongoITBase.java
index 7488572..2ca2780 100644
--- a/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/geotemporal/mongo/MongoITBase.java
+++ b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/geotemporal/mongo/MongoITBase.java
@@ -21,7 +21,7 @@ package org.apache.rya.indexing.geotemporal.mongo;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.geotemporal.GeoTemporalTestBase;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
-import org.apache.rya.mongodb.MockMongoSingleton;
+import org.apache.rya.mongodb.EmbeddedMongoSingleton;
 import org.junit.After;
 import org.junit.Before;
 
@@ -38,7 +38,7 @@ public class MongoITBase extends GeoTemporalTestBase {
 
     @Before
     public void setupTest() throws Exception {
-        mongoClient = MockMongoSingleton.getInstance();
+        mongoClient = EmbeddedMongoSingleton.getInstance();
         conf = MongoIndexingConfiguration.builder()
             .setMongoCollectionPrefix("test_")
             .setMongoDBName("testDB")

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/26e9214c/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoIndexerDeleteIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoIndexerDeleteIT.java b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoIndexerDeleteIT.java
index 65f37c3..5751887 100644
--- a/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoIndexerDeleteIT.java
+++ b/extras/rya.geoindexing/geo.mongo/src/test/java/org/apache/rya/indexing/mongo/MongoIndexerDeleteIT.java
@@ -30,7 +30,7 @@ import org.apache.rya.indexing.TemporalInstantRfc3339;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.apache.rya.indexing.accumulo.geo.OptionalConfigUtils;
 import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
-import org.apache.rya.mongodb.MockMongoSingleton;
+import org.apache.rya.mongodb.EmbeddedMongoSingleton;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -70,7 +70,7 @@ public class MongoIndexerDeleteIT {
             .setMongoTemporalPredicates("Property:atTime")
             .build();
 
-        client = MockMongoSingleton.getInstance();
+        client = EmbeddedMongoSingleton.getInstance();
         indxrConf.setBoolean(OptionalConfigUtils.USE_GEO, true);
         indxrConf.set(ConfigUtils.GEO_PREDICATES_LIST, "http://www.opengis.net/ont/geosparql#asWKT");
         indxrConf.setBoolean(ConfigUtils.USE_MONGO, true);


[07/11] incubator-rya git commit: RYA-401 Fixed all default charset bugs. Closes #243.

Posted by ca...@apache.org.
RYA-401 Fixed all default charset bugs. Closes #243.

- Added findbugs policy to build.
- Removed the maven-surefire-plugin's -Dfile.encoding=UTF-8 argLine.
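
The bug class being fixed: any String.getBytes() or new String(byte[]) call without an explicit charset silently uses the JVM default, which the removed -Dfile.encoding=UTF-8 argLine had been papering over during tests. The mechanical change applied across the files below, in miniature:

    import java.nio.charset.StandardCharsets;

    // Before: the resulting bytes depend on the platform default charset.
    byte[] risky = "caf\u00e9".getBytes();

    // After: the byte layout is fixed regardless of JVM or OS settings.
    byte[] safe = "caf\u00e9".getBytes(StandardCharsets.UTF_8);
    String roundTrip = new String(safe, StandardCharsets.UTF_8);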

Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/538cfccc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/538cfccc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/538cfccc

Branch: refs/heads/master
Commit: 538cfccc8ce7b6cfda564a0160f6382209fdfdff
Parents: 4089e70
Author: jdasch <hc...@gmail.com>
Authored: Wed Oct 11 09:10:38 2017 -0400
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:52:52 2017 -0400

----------------------------------------------------------------------
 .../rya/api/RdfCloudTripleStoreConstants.java   |   21 +-
 .../org/apache/rya/api/domain/RyaStatement.java |  120 +-
 .../java/org/apache/rya/api/domain/RyaType.java |   58 +-
 .../strategy/AbstractTriplePatternStrategy.java |   26 +-
 .../AbstractHashedTriplePatternStrategy.java    |   26 +-
 .../HashedPoWholeRowTriplePatternStrategy.java  |   73 +-
 .../HashedSpoWholeRowTriplePatternStrategy.java |   89 +-
 .../OspWholeRowTriplePatternStrategy.java       |   36 +-
 .../PoWholeRowTriplePatternStrategy.java        |   70 +-
 .../SpoWholeRowTriplePatternStrategy.java       |   62 +-
 .../resolver/impl/CustomDatatypeResolver.java   |   37 +-
 .../api/resolver/impl/RyaTypeResolverImpl.java  |   42 +-
 .../impl/WholeRowHashedTripleResolver.java      |  137 ++-
 .../triple/impl/WholeRowTripleResolver.java     |  109 +-
 .../AccumuloNamespaceTableIterator.java         |   44 +-
 .../rya/accumulo/AccumuloRdfEvalStatsDAO.java   |   85 +-
 .../org/apache/rya/accumulo/AccumuloRyaDAO.java |   10 +-
 .../dao/SimpleMongoDBNamespaceManager.java      |   88 +-
 .../dao/SimpleMongoDBStorageStrategy.java       |    6 +-
 .../DocumentIndexIntersectingIterator.java      |   18 +-
 .../java/org/apache/rya/indexing/KeyParts.java  |    7 +-
 .../accumulo/entity/AccumuloDocIdIndexer.java   |  117 +-
 .../accumulo/entity/EntityCentricIndex.java     |   56 +-
 .../rya/indexing/accumulo/entity/StarQuery.java |  123 +-
 .../freetext/iterators/AndingIterator.java      |  169 +--
 .../freetext/query/SimpleCharStream.java        | 1057 ++----------------
 .../src/main/java/RyaDirectExample.java         |   14 +-
 .../serialization/BindingSetSerDe.java          |   47 +-
 .../export/accumulo/util/AccumuloRyaUtils.java  |    5 +-
 .../client/merge/VisibilityStatementMerger.java |    8 +-
 .../apache/rya/accumulo/mr/merge/CopyTool.java  |   17 +-
 .../mr/merge/util/AccumuloRyaUtils.java         |   19 +-
 .../accumulo/mr/merge/util/QueryRuleset.java    |   48 +-
 .../pcj/fluo/demo/FluoAndHistoricPcjsDemo.java  |    3 +-
 .../joinselect/mr/JoinSelectProspectOutput.java |   64 +-
 .../rya/prospector/plans/impl/CountPlan.java    |   17 +-
 .../rya/prospector/utils/ProspectorUtils.java   |   19 +-
 .../rya/reasoning/mr/ConformanceTest.java       |  118 +-
 .../rya/accumulo/mr/GraphXEdgeInputFormat.java  |   74 +-
 .../apache/rya/accumulo/mr/RyaOutputFormat.java |  155 +--
 .../accumulo/mr/tools/AccumuloRdfCountTool.java |  100 +-
 .../apache/rya/camel/cbsail/CbSailProducer.java |   99 +-
 .../rya/accumulo/pig/AccumuloStorage.java       |  145 +--
 .../rya/accumulo/pig/IndexWritingTool.java      |  168 ++-
 .../rya/accumulo/pig/SparqlQueryPigEngine.java  |  111 +-
 .../accumulo/pig/StatementPatternStorage.java   |   38 +-
 pom.xml                                         |   30 +-
 .../RdfCloudTripleStoreConnection.java          |    8 +-
 48 files changed, 1615 insertions(+), 2378 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConstants.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConstants.java b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConstants.java
index 2092951..dd7ada0 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConstants.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/RdfCloudTripleStoreConstants.java
@@ -1,7 +1,3 @@
-package org.apache.rya.api;
-
-import org.apache.hadoop.io.Text;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -10,9 +6,9 @@ import org.apache.hadoop.io.Text;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -20,9 +16,11 @@ import org.apache.hadoop.io.Text;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api;
 
+import java.nio.charset.StandardCharsets;
 
-
+import org.apache.hadoop.io.Text;
 import org.apache.rya.api.domain.RyaSchema;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.domain.RyaURI;
@@ -89,8 +87,9 @@ public class RdfCloudTripleStoreConstants {
     public static Text TBL_NAMESPACE_TXT = new Text(TBL_NAMESPACE);
 
     public static void prefixTables(String prefix) {
-        if (prefix == null)
+        if (prefix == null) {
             prefix = TBL_PRFX_DEF;
+        }
         TBL_SPO = prefix + TBL_SPO_SUFFIX;
         TBL_PO = prefix + TBL_PO_SUFFIX;
         TBL_OSP = prefix + TBL_OSP_SUFFIX;
@@ -115,9 +114,9 @@ public class RdfCloudTripleStoreConstants {
     public static final String DELIM = "\u0000";
     public static final String DELIM_STOP = "\u0001";
     public static final String LAST = "\uFFDD";
-    public static final String TYPE_DELIM = new String(TYPE_DELIM_BYTES);
-    public static final byte[] DELIM_BYTES = DELIM.getBytes();
-    public static final byte[] DELIM_STOP_BYTES = DELIM_STOP.getBytes();
+    public static final String TYPE_DELIM = new String(TYPE_DELIM_BYTES, StandardCharsets.UTF_8);
+    public static final byte[] DELIM_BYTES = DELIM.getBytes(StandardCharsets.UTF_8);
+    public static final byte[] DELIM_STOP_BYTES = DELIM_STOP.getBytes(StandardCharsets.UTF_8);
 
 
     /* RECORD TYPES */
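
Pinning the charset matters most for these delimiter constants, which are baked into row keys: "\uFFDD" (LAST) has no representation in single-byte charsets, so encoding it with the platform default could silently change key bytes and ordering. A small runnable demonstration of the failure mode:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class DelimCharsetDemo {
        public static void main(final String[] args) {
            // UTF-8 encodes U+FFDD as three bytes; ISO-8859-1 cannot represent
            // it and substitutes '?' (0x3F), so keys written under a non-UTF-8
            // default charset would never match keys written under UTF-8.
            System.out.println(Arrays.toString("\uFFDD".getBytes(StandardCharsets.UTF_8)));      // [-17, -65, -99]
            System.out.println(Arrays.toString("\uFFDD".getBytes(StandardCharsets.ISO_8859_1))); // [63]
        }
    }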

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaStatement.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaStatement.java b/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaStatement.java
index de41be9..eac6740 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaStatement.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaStatement.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.domain;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.domain;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,9 +16,9 @@ package org.apache.rya.api.domain;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.domain;
 
-
-
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 /**
@@ -40,34 +38,34 @@ public class RyaStatement {
     public RyaStatement() {
     }
 
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object) {
         this(subject, predicate, object, null);
     }
 
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         this(subject, predicate, object, context, null);
     }
 
 
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context, final String qualifier) {
         this(subject, predicate, object, context, qualifier, new StatementMetadata());
     }
 
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, StatementMetadata metadata) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context, final String qualifier, final StatementMetadata metadata) {
         this(subject, predicate, object, context, qualifier, metadata, null);
     }
 
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, StatementMetadata metadata, byte[] columnVisibility) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context, final String qualifier, final StatementMetadata metadata, final byte[] columnVisibility) {
         this(subject, predicate, object, context, qualifier, columnVisibility, metadata.toBytes());
     }
 
     @Deprecated
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, byte[] columnVisibility, byte[] value) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context, final String qualifier, final byte[] columnVisibility, final byte[] value) {
         this(subject, predicate, object, context, qualifier, columnVisibility, value, null);
     }
 
     @Deprecated
-    public RyaStatement(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context, String qualifier, byte[] columnVisibility, byte[] value, Long timestamp) {
+    public RyaStatement(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context, final String qualifier, final byte[] columnVisibility, final byte[] value, final Long timestamp) {
         this.subject = subject;
         this.predicate = predicate;
         this.object = object;
@@ -82,7 +80,7 @@ public class RyaStatement {
         return subject;
     }
 
-    public void setSubject(RyaURI subject) {
+    public void setSubject(final RyaURI subject) {
         this.subject = subject;
     }
 
@@ -90,7 +88,7 @@ public class RyaStatement {
         return predicate;
     }
 
-    public void setPredicate(RyaURI predicate) {
+    public void setPredicate(final RyaURI predicate) {
         this.predicate = predicate;
     }
 
@@ -98,7 +96,7 @@ public class RyaStatement {
         return object;
     }
 
-    public void setObject(RyaType object) {
+    public void setObject(final RyaType object) {
         this.object = object;
     }
 
@@ -106,7 +104,7 @@ public class RyaStatement {
         return context;
     }
 
-    public void setContext(RyaURI context) {
+    public void setContext(final RyaURI context) {
         this.context = context;
     }
 
@@ -114,22 +112,22 @@ public class RyaStatement {
         return columnVisibility;
     }
 
-    public void setColumnVisibility(byte[] columnVisibility) {
+    public void setColumnVisibility(final byte[] columnVisibility) {
         this.columnVisibility = columnVisibility;
     }
-    
+
     public StatementMetadata getMetadata() {
-        // try to deserialize the value, if not assume that there was 
+        // try to deserialize the value, if not assume that there was
         // no explicit metadata
         try {
             return new StatementMetadata(value);
         }
-        catch (Exception ex){
+        catch (final Exception ex){
             return null;
         }
     }
-    
-    public void setStatementMetadata(StatementMetadata metadata){
+
+    public void setStatementMetadata(final StatementMetadata metadata){
         this.value = metadata.toBytes();
     }
 
@@ -139,7 +137,7 @@ public class RyaStatement {
     }
 
     @Deprecated
-    public void setValue(byte[] value) {
+    public void setValue(final byte[] value) {
         this.value = value;
     }
 
@@ -147,25 +145,45 @@ public class RyaStatement {
         return timestamp;
     }
 
-    public void setTimestamp(Long timestamp) {
+    public void setTimestamp(final Long timestamp) {
         this.timestamp = timestamp;
     }
 
     @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+    public boolean equals(final Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
 
-        RyaStatement that = (RyaStatement) o;
+        final RyaStatement that = (RyaStatement) o;
 
-        if (!Arrays.equals(columnVisibility, that.columnVisibility)) return false;
-        if (context != null ? !context.equals(that.context) : that.context != null) return false;
-        if (object != null ? !object.equals(that.object) : that.object != null) return false;
-        if (predicate != null ? !predicate.equals(that.predicate) : that.predicate != null) return false;
-        if (qualifer != null ? !qualifer.equals(that.qualifer) : that.qualifer != null) return false;
-        if (subject != null ? !subject.equals(that.subject) : that.subject != null) return false;
-        if (timestamp != null ? !timestamp.equals(that.timestamp) : that.timestamp != null) return false;
-        if (!Arrays.equals(value, that.value)) return false;
+        if (!Arrays.equals(columnVisibility, that.columnVisibility)) {
+            return false;
+        }
+        if (context != null ? !context.equals(that.context) : that.context != null) {
+            return false;
+        }
+        if (object != null ? !object.equals(that.object) : that.object != null) {
+            return false;
+        }
+        if (predicate != null ? !predicate.equals(that.predicate) : that.predicate != null) {
+            return false;
+        }
+        if (qualifer != null ? !qualifer.equals(that.qualifer) : that.qualifer != null) {
+            return false;
+        }
+        if (subject != null ? !subject.equals(that.subject) : that.subject != null) {
+            return false;
+        }
+        if (timestamp != null ? !timestamp.equals(that.timestamp) : that.timestamp != null) {
+            return false;
+        }
+        if (!Arrays.equals(value, that.value)) {
+            return false;
+        }
 
         return true;
     }
@@ -187,7 +205,7 @@ public class RyaStatement {
         return qualifer;
     }
 
-    public void setQualifer(String qualifer) {
+    public void setQualifer(final String qualifer) {
         this.qualifer = qualifer;
     }
 
@@ -200,8 +218,8 @@ public class RyaStatement {
         sb.append(", object=").append(object);
         sb.append(", context=").append(context);
         sb.append(", qualifier=").append(qualifer);
-        sb.append(", columnVisibility=").append(columnVisibility == null ? "null" : new String(columnVisibility));
-        sb.append(", value=").append(value == null ? "null" : new String(value));
+        sb.append(", columnVisibility=").append(columnVisibility == null ? "null" : new String(columnVisibility, StandardCharsets.UTF_8));
+        sb.append(", value=").append(value == null ? "null" : new String(value, StandardCharsets.UTF_8));
         sb.append(", timestamp=").append(timestamp);
         sb.append('}');
         return sb.toString();
@@ -211,7 +229,7 @@ public class RyaStatement {
         return new RyaStatementBuilder();
     }
 
-    public static RyaStatementBuilder builder(RyaStatement ryaStatement) {
+    public static RyaStatementBuilder builder(final RyaStatement ryaStatement) {
         return new RyaStatementBuilder(ryaStatement);
     }
 
@@ -225,52 +243,52 @@ public class RyaStatement {
             ryaStatement = new RyaStatement();
         }
 
-        public RyaStatementBuilder(RyaStatement ryaStatement) {
+        public RyaStatementBuilder(final RyaStatement ryaStatement) {
             this.ryaStatement = ryaStatement;
         }
 
-        public RyaStatementBuilder setTimestamp(Long timestamp) {
+        public RyaStatementBuilder setTimestamp(final Long timestamp) {
             ryaStatement.setTimestamp(timestamp);
             return this;
         }
 
         @Deprecated
-        public RyaStatementBuilder setValue(byte[] value) {
+        public RyaStatementBuilder setValue(final byte[] value) {
             ryaStatement.setValue(value);
             return this;
         }
 
-        public RyaStatementBuilder setMetadata(StatementMetadata metadata) {
+        public RyaStatementBuilder setMetadata(final StatementMetadata metadata) {
             ryaStatement.setValue(metadata.toBytes());
             return this;
         }
 
-        public RyaStatementBuilder setColumnVisibility(byte[] columnVisibility) {
+        public RyaStatementBuilder setColumnVisibility(final byte[] columnVisibility) {
             ryaStatement.setColumnVisibility(columnVisibility);
             return this;
         }
 
-        public RyaStatementBuilder setQualifier(String str) {
+        public RyaStatementBuilder setQualifier(final String str) {
             ryaStatement.setQualifer(str);
             return this;
         }
 
-        public RyaStatementBuilder setContext(RyaURI ryaURI) {
+        public RyaStatementBuilder setContext(final RyaURI ryaURI) {
             ryaStatement.setContext(ryaURI);
             return this;
         }
 
-        public RyaStatementBuilder setSubject(RyaURI ryaURI) {
+        public RyaStatementBuilder setSubject(final RyaURI ryaURI) {
             ryaStatement.setSubject(ryaURI);
             return this;
         }
 
-        public RyaStatementBuilder setPredicate(RyaURI ryaURI) {
+        public RyaStatementBuilder setPredicate(final RyaURI ryaURI) {
             ryaStatement.setPredicate(ryaURI);
             return this;
         }
 
-        public RyaStatementBuilder setObject(RyaType ryaType) {
+        public RyaStatementBuilder setObject(final RyaType ryaType) {
             ryaStatement.setObject(ryaType);
             return this;
         }
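
The builder refactored above reads naturally in use. A sketch, where the URIs are illustrative and the RyaURI constructor and final build() call are assumptions not shown in this hunk:

    final RyaStatement stmt = RyaStatement.builder()
            .setSubject(new RyaURI("urn:example:alice"))   // illustrative URIs
            .setPredicate(new RyaURI("urn:example:knows"))
            .setObject(new RyaType("Bob"))     // plain data defaults to xsd:string
            .setTimestamp(System.currentTimeMillis())
            .build();                          // assumed terminal method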

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaType.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaType.java b/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaType.java
index 94a0ecf..ab5306e 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaType.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/domain/RyaType.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.domain;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.domain;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,9 +16,7 @@ package org.apache.rya.api.domain;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
-
+package org.apache.rya.api.domain;
 
 import org.openrdf.model.URI;
 import org.openrdf.model.vocabulary.XMLSchema;
@@ -39,12 +35,12 @@ public class RyaType implements Comparable {
         setDataType(XMLSchema.STRING);
     }
 
-    public RyaType(String data) {
+    public RyaType(final String data) {
         this(XMLSchema.STRING, data);
     }
 
 
-    public RyaType(URI dataType, String data) {
+    public RyaType(final URI dataType, final String data) {
         setDataType(dataType);
         setData(data);
     }
@@ -62,11 +58,11 @@ public class RyaType implements Comparable {
         return data;
     }
 
-    public void setDataType(URI dataType) {
+    public void setDataType(final URI dataType) {
         this.dataType = dataType;
     }
 
-    public void setData(String data) {
+    public void setData(final String data) {
         this.data = data;
     }
 
@@ -86,12 +82,20 @@ public class RyaType implements Comparable {
      * @return true if the other object is also a RyaType and both data and datatype match.
      */
     @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || !(o instanceof RyaType)) return false;
-        RyaType ryaType = (RyaType) o;
-        if (data != null ? !data.equals(ryaType.data) : ryaType.data != null) return false;
-        if (dataType != null ? !dataType.equals(ryaType.dataType) : ryaType.dataType != null) return false;
+    public boolean equals(final Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || !(o instanceof RyaType)) {
+            return false;
+        }
+        final RyaType ryaType = (RyaType) o;
+        if (data != null ? !data.equals(ryaType.data) : ryaType.data != null) {
+            return false;
+        }
+        if (dataType != null ? !dataType.equals(ryaType.dataType) : ryaType.dataType != null) {
+            return false;
+        }
         return true;
     }
 
@@ -114,19 +118,27 @@ public class RyaType implements Comparable {
      *          Otherwise, an integer whose sign yields a consistent ordering.
      */
     @Override
-    public int compareTo(Object o) {
+    public int compareTo(final Object o) {
         int result = -1;
         if (o != null && o instanceof RyaType) {
             result = 0;
-            RyaType other = (RyaType) o;
+            final RyaType other = (RyaType) o;
             if (this.data != other.data) {
-                if (this.data == null) return 1;
-                if (other.data == null) return -1;
+                if (this.data == null) {
+                    return 1;
+                }
+                if (other.data == null) {
+                    return -1;
+                }
                 result = this.data.compareTo(other.data);
             }
             if (result == 0 && this.dataType != other.dataType) {
-                if (this.dataType == null) return 1;
-                if (other.dataType == null) return -1;
+                if (this.dataType == null) {
+                    return 1;
+                }
+                if (other.dataType == null) {
+                    return -1;
+                }
                 result = this.dataType.toString().compareTo(other.dataType.toString());
             }
         }
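
The null handling in compareTo above is easy to misread, so a small worked instance (values illustrative): data is compared first, then the datatype URI as a string, and a null field sorts after a non-null one (compareTo returns 1 when this.data is null).

    final RyaType a = new RyaType("apple");                  // xsd:string by default
    final RyaType b = new RyaType(XMLSchema.INTEGER, "42");
    System.out.println(a.compareTo(b) > 0); // true: "apple".compareTo("42") > 0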

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/AbstractTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/AbstractTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/AbstractTriplePatternStrategy.java
index 0224787..814fe5f 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/AbstractTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/AbstractTriplePatternStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.query.strategy;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.query.strategy;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,16 +16,17 @@ package org.apache.rya.api.query.strategy;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.query.strategy;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM;
 
+import java.nio.charset.StandardCharsets;
 
-import com.google.common.base.Preconditions;
 import org.apache.rya.api.RdfCloudTripleStoreConstants;
-import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.triple.TripleRowRegex;
 
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM;
+import com.google.common.base.Preconditions;
 
 /**
  * Date: 7/14/12
@@ -36,16 +35,17 @@ import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM;
 public abstract class AbstractTriplePatternStrategy implements TriplePatternStrategy {
     public static final String ALL_REGEX = "([\\s\\S]*)";
 
+    @Override
     public abstract RdfCloudTripleStoreConstants.TABLE_LAYOUT getLayout();
 
     @Override
-    public TripleRowRegex buildRegex(String subject, String predicate, String object, String context, byte[] objectTypeInfo) {
-        RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = getLayout();
+    public TripleRowRegex buildRegex(final String subject, final String predicate, final String object, final String context, final byte[] objectTypeInfo) {
+        final RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = getLayout();
         Preconditions.checkNotNull(table_layout);
         if (subject == null && predicate == null && object == null && context == null && objectTypeInfo == null) {
             return null; //no regex
         }
-        StringBuilder sb = new StringBuilder();
+        final StringBuilder sb = new StringBuilder();
         String first = subject;
         String second = predicate;
         String third = object;
@@ -79,12 +79,12 @@ public abstract class AbstractTriplePatternStrategy implements TriplePatternStra
                 sb.append(TYPE_DELIM);
                 sb.append(ALL_REGEX);
             }else {
-                sb.append(new String(objectTypeInfo));
+                sb.append(new String(objectTypeInfo, StandardCharsets.UTF_8));
             }
         }else {
             sb.append(ALL_REGEX);
             if (objectTypeInfo != null) {
-                sb.append(new String(objectTypeInfo));
+                sb.append(new String(objectTypeInfo, StandardCharsets.UTF_8));
             }
         }
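
One detail worth calling out in buildRegex: ALL_REGEX is "([\s\S]*)" rather than "(.*)". The practical difference is that "[\s\S]" matches across newlines without requiring Pattern.DOTALL, which matters if serialized values ever contain them:

    import java.util.regex.Pattern;

    public class AllRegexDemo {
        public static void main(final String[] args) {
            System.out.println(Pattern.matches("([\\s\\S]*)", "line1\nline2")); // true
            System.out.println(Pattern.matches("(.*)", "line1\nline2"));        // false
        }
    }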
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/AbstractHashedTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/AbstractHashedTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/AbstractHashedTriplePatternStrategy.java
index 60853f1..140a301 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/AbstractHashedTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/AbstractHashedTriplePatternStrategy.java
@@ -1,4 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -7,9 +6,9 @@ package org.apache.rya.api.query.strategy.wholerow;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -17,24 +16,29 @@ package org.apache.rya.api.query.strategy.wholerow;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.query.strategy.wholerow;
+
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM;
+
+import java.nio.charset.StandardCharsets;
 
-import com.google.common.base.Preconditions;
 import org.apache.rya.api.RdfCloudTripleStoreConstants;
 import org.apache.rya.api.query.strategy.AbstractTriplePatternStrategy;
 import org.apache.rya.api.query.strategy.TriplePatternStrategy;
 import org.apache.rya.api.resolver.triple.TripleRowRegex;
 
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM;
+import com.google.common.base.Preconditions;
 
 public abstract class AbstractHashedTriplePatternStrategy extends AbstractTriplePatternStrategy implements TriplePatternStrategy {
     public static final String HASHED_ALL_REGEX = "([0-9a-f]{32})\u0000";
 
+    @Override
     public abstract RdfCloudTripleStoreConstants.TABLE_LAYOUT getLayout();
 
     @Override
-    public TripleRowRegex buildRegex(String subject, String predicate, String object, String context, byte[] objectTypeInfo) {
-        RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = getLayout();
+    public TripleRowRegex buildRegex(final String subject, final String predicate, final String object, final String context, final byte[] objectTypeInfo) {
+        final RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = getLayout();
         Preconditions.checkNotNull(table_layout);
 
         //O is not hashed so kick out to super
@@ -45,7 +49,7 @@ public abstract class AbstractHashedTriplePatternStrategy extends AbstractTriple
         if (subject == null && predicate == null && object == null && context == null && objectTypeInfo == null) {
             return null; //no regex
         }
-        StringBuilder sb = new StringBuilder();
+        final StringBuilder sb = new StringBuilder();
         String first = subject;
         String second = predicate;
         String third = object;
@@ -76,12 +80,12 @@ public abstract class AbstractHashedTriplePatternStrategy extends AbstractTriple
                 sb.append(TYPE_DELIM);
                 sb.append(ALL_REGEX);
             }else {
-                sb.append(new String(objectTypeInfo));
+                sb.append(new String(objectTypeInfo, StandardCharsets.UTF_8));
             }
         }else {
             sb.append(ALL_REGEX);
             if (objectTypeInfo != null) {
-                sb.append(new String(objectTypeInfo));
+                sb.append(new String(objectTypeInfo, StandardCharsets.UTF_8));
             }
         }
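
HASHED_ALL_REGEX spells out what the hashed layouts prepend to each row key: a 32-character lowercase hex MD5 digest followed by the \u0000 delimiter, and the next file computes exactly this digest over the predicate bytes. A sketch of producing that prefix; the class and helper names are illustrative:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    import org.apache.commons.codec.binary.Hex;

    public class HashedKeyPrefix {
        // Illustrative helper: the 32-char lowercase hex MD5 prefix that
        // HASHED_ALL_REGEX ("([0-9a-f]{32})\u0000") expects on a row key.
        static String md5Prefix(final String predicate) throws Exception {
            final MessageDigest md = MessageDigest.getInstance("MD5");
            final byte[] digest = md.digest(predicate.getBytes(StandardCharsets.UTF_8));
            return Hex.encodeHexString(digest); // "0cc175b9c0f1b6a831c399e269772661" for "a"
        }
    }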
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
index f1abba9..6e8bb31 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedPoWholeRowTriplePatternStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.query.strategy.wholerow;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,22 +16,19 @@ package org.apache.rya.api.query.strategy.wholerow;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.api.query.strategy.wholerow;
 
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Map;
 
 import org.apache.commons.codec.binary.Hex;
-
-import com.google.common.primitives.Bytes;
-
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
 import org.apache.rya.api.RdfCloudTripleStoreConstants;
 import org.apache.rya.api.RdfCloudTripleStoreUtils;
@@ -44,6 +39,8 @@ import org.apache.rya.api.query.strategy.ByteRange;
 import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 
+import com.google.common.primitives.Bytes;
+
 /**
  * Date: 7/14/12
  * Time: 7:35 AM
@@ -57,31 +54,33 @@ public class HashedPoWholeRowTriplePatternStrategy extends AbstractHashedTripleP
 
     @Override
     public Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT,
-            ByteRange> defineRange(RyaURI subject, RyaURI predicate, RyaType object,
-                                   RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException {
+            ByteRange> defineRange(final RyaURI subject, final RyaURI predicate, final RyaType object,
+                                   final RyaURI context, final RdfCloudTripleStoreConfiguration conf) throws IOException {
         try {
             //po(ng)
             //po_r(s)(ng)
             //p(ng)
             //p_r(o)(ng)
             //r(p)(ng)
-            if (!handles(subject, predicate, object, context)) return null;
+            if (!handles(subject, predicate, object, context)) {
+                return null;
+            }
+
+            final RyaContext ryaContext = RyaContext.getInstance();
+            final MessageDigest md = MessageDigest.getInstance("MD5");
 
-            RyaContext ryaContext = RyaContext.getInstance();
-            MessageDigest md = MessageDigest.getInstance("MD5");
-            
 
-            RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO;
+            final RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO;
             byte[] start, stop;
             if (object != null) {
                 if (object instanceof RyaRange) {
                     //p_r(o)
                     RyaRange rv = (RyaRange) object;
                     rv = ryaContext.transformRange(rv);
-                    byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
-                    byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
-                    byte[] predBytes = predicate.getData().getBytes();
-                    byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes();
+                    final byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
+                    final byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
+                    final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes(StandardCharsets.UTF_8);
                     start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes);
                     stop = Bytes.concat(predHash, DELIM_BYTES, predBytes,DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES);
                 } else {
@@ -89,48 +88,52 @@ public class HashedPoWholeRowTriplePatternStrategy extends AbstractHashedTripleP
                         //po_r(s)
                         RyaRange ru = (RyaRange) subject;
                         ru = ryaContext.transformRange(ru);
-                        byte[] subjStartBytes = ru.getStart().getData().getBytes();
-                        byte[] subjStopBytes = ru.getStop().getData().getBytes();
-                        byte[] predBytes = predicate.getData().getBytes();
-                        byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes();
-                        byte[] objBytes = ryaContext.serializeType(object)[0];
+                        final byte[] subjStartBytes = ru.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] subjStopBytes = ru.getStop().getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes(StandardCharsets.UTF_8);
+                        final byte[] objBytes = ryaContext.serializeType(object)[0];
                         start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStartBytes);
                         stop = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStopBytes, TYPE_DELIM_BYTES, LAST_BYTES);
                     } else {
                         //po
                         //TODO: There must be a better way than creating multiple byte[]
-                        byte[] predBytes = predicate.getData().getBytes();
-                        byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes();
-                        byte[] objBytes = ryaContext.serializeType(object)[0];
+                        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes(StandardCharsets.UTF_8);
+                        final byte[] objBytes = ryaContext.serializeType(object)[0];
                         start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES, objBytes, DELIM_BYTES);
                         stop = Bytes.concat(start, LAST_BYTES);
                     }
                 }
             } else {
                 //p
-                byte[] predBytes = predicate.getData().getBytes();
-                byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes();
+                final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+                final byte[] predHash = Hex.encodeHexString(md.digest(predBytes)).getBytes(StandardCharsets.UTF_8);
                 start = Bytes.concat(predHash, DELIM_BYTES, predBytes, DELIM_BYTES);
                 stop = Bytes.concat(start, LAST_BYTES);
             }
             return new RdfCloudTripleStoreUtils.CustomEntry<RdfCloudTripleStoreConstants.TABLE_LAYOUT,
                     ByteRange>(table_layout, new ByteRange(start, stop));
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new IOException(e);
-        } catch (NoSuchAlgorithmException e) {
+        } catch (final NoSuchAlgorithmException e) {
         	throw new IOException(e);
 		}
     }
 
     @Override
-    public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public boolean handles(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         //po(ng)
         //p_r(o)(ng)
         //po_r(s)(ng)
         //p(ng)
         //r(p)(ng)
-        if ((predicate == null) || (predicate instanceof RyaRange)) return false;
-        if (subject != null && !(subject instanceof RyaRange)) return false;
+        if ((predicate == null) || (predicate instanceof RyaRange)) {
+            return false;
+        }
+        if (subject != null && !(subject instanceof RyaRange)) {
+            return false;
+        }
         return subject == null || object != null;
     }
 }

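All of the defineRange variants above share one bracketing idiom: the scan start is a fixed key prefix plus a delimiter, and the scan stop is the same prefix plus LAST_BYTES, so every row key beginning with that prefix sorts inside the range. A minimal sketch of the idiom, with stand-in values for the Rya delimiter constants:

    import java.nio.charset.StandardCharsets;

    import com.google.common.primitives.Bytes;

    public class ByteRangeSketch {
        // Stand-ins for RdfCloudTripleStoreConstants.DELIM_BYTES / LAST_BYTES;
        // the real values live in rya.api and are assumed here.
        private static final byte[] DELIM_BYTES = new byte[]{0x00};
        private static final byte[] LAST_BYTES = new byte[]{(byte) 0xff};

        public static void main(final String[] args) {
            final byte[] prefix = "urn:example:pred".getBytes(StandardCharsets.UTF_8);
            final byte[] start = Bytes.concat(prefix, DELIM_BYTES);
            // Any key of the form prefix + delim + <anything> sorts before this stop key.
            final byte[] stop = Bytes.concat(start, LAST_BYTES);
            System.out.println(start.length + " -> " + stop.length);
        }
    }
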
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
index c650b89..c87d268 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/HashedSpoWholeRowTriplePatternStrategy.java
@@ -1,16 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
-
-import java.io.IOException;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.Map;
-
-import org.apache.commons.codec.binary.Hex;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -19,9 +6,9 @@ import org.apache.commons.codec.binary.Hex;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -29,11 +16,19 @@ import org.apache.commons.codec.binary.Hex;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.query.strategy.wholerow;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Map;
 
-import com.google.common.primitives.Bytes;
-
+import org.apache.commons.codec.binary.Hex;
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
 import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
 import org.apache.rya.api.RdfCloudTripleStoreUtils;
@@ -45,6 +40,8 @@ import org.apache.rya.api.query.strategy.ByteRange;
 import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 
+import com.google.common.primitives.Bytes;
+
 /**
  * Date: 7/14/12
  * Time: 7:35 AM
@@ -58,20 +55,22 @@ public class HashedSpoWholeRowTriplePatternStrategy extends AbstractHashedTriple
     }
 
     @Override
-    public Map.Entry<TABLE_LAYOUT, ByteRange> defineRange(RyaURI subject, RyaURI predicate, RyaType object,
-                                                          RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException {
+    public Map.Entry<TABLE_LAYOUT, ByteRange> defineRange(final RyaURI subject, final RyaURI predicate, final RyaType object,
+                                                          final RyaURI context, final RdfCloudTripleStoreConfiguration conf) throws IOException {
         try {
             //spo(ng)
             //sp(ng)
             //s(ng)
             //sp_r(o)(ng)
             //s_r(p)(ng)
-            if (!handles(subject, predicate, object, context)) return null;
-            MessageDigest md = MessageDigest.getInstance("MD5");
-            
-            RyaContext ryaContext = RyaContext.getInstance();
+            if (!handles(subject, predicate, object, context)) {
+                return null;
+            }
+            final MessageDigest md = MessageDigest.getInstance("MD5");
+
+            final RyaContext ryaContext = RyaContext.getInstance();
 
-            TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO;
+            final TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO;
             byte[] start;
             byte[] stop;
             if (predicate != null) {
@@ -81,21 +80,21 @@ public class HashedSpoWholeRowTriplePatternStrategy extends AbstractHashedTriple
                         //range = sp_r(o.s)->sp_r(o.e) (remove last byte to remove type info)
                         RyaRange rv = (RyaRange) object;
                         rv = ryaContext.transformRange(rv);
-                        byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
-                        byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
-                        byte[] subjBytes = subject.getData().getBytes();
-                        byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes();
-                        byte[] predBytes = predicate.getData().getBytes();
+                        final byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
+                        final byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
+                        final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes(StandardCharsets.UTF_8);
+                        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
                         start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes);
                         stop = Bytes.concat(hashSubj, DELIM_BYTES,subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES);
                     } else {
                         //spo
                         //range = spo->spo (remove last byte to remove type info)
                         //TODO: There must be a better way than creating multiple byte[]
-                        byte[] subjBytes = subject.getData().getBytes();
-                        byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes();
-                         byte[] objBytes = ryaContext.serializeType(object)[0];
-                        start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES);
+                        final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes(StandardCharsets.UTF_8);
+                         final byte[] objBytes = ryaContext.serializeType(object)[0];
+                        start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES);
                         stop = Bytes.concat(start, LAST_BYTES);
                     }
                 } else if (predicate instanceof RyaRange) {
@@ -103,39 +102,39 @@ public class HashedSpoWholeRowTriplePatternStrategy extends AbstractHashedTriple
                     //range = s_r(p.s)->s_r(p.e)
                     RyaRange rv = (RyaRange) predicate;
                     rv = ryaContext.transformRange(rv);
-                    byte[] subjBytes = subject.getData().getBytes();
-                    byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes();
-                    byte[] predStartBytes = rv.getStart().getData().getBytes();
-                    byte[] predStopBytes = rv.getStop().getData().getBytes();
+                    final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes(StandardCharsets.UTF_8);
+                    final byte[] predStartBytes = rv.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] predStopBytes = rv.getStop().getData().getBytes(StandardCharsets.UTF_8);
                     start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predStartBytes);
                     stop = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predStopBytes, DELIM_BYTES, LAST_BYTES);
                 } else {
                     //sp
                     //range = sp
-                    byte[] subjBytes = subject.getData().getBytes();
-                    byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes();
-                    start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES);
+                    final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes(StandardCharsets.UTF_8);
+                    start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES, predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES);
                     stop = Bytes.concat(start, LAST_BYTES);
                 }
             } else {
                 //s
                 //range = s
-                byte[] subjBytes = subject.getData().getBytes();
-                byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes();
+                final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                final byte[] hashSubj = Hex.encodeHexString(md.digest(subjBytes)).getBytes(StandardCharsets.UTF_8);
                 start = Bytes.concat(hashSubj, DELIM_BYTES, subjBytes, DELIM_BYTES);
                 stop = Bytes.concat(start, LAST_BYTES);
             }
             return new RdfCloudTripleStoreUtils.CustomEntry<TABLE_LAYOUT, ByteRange>(table_layout,
                     new ByteRange(start, stop));
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new IOException(e);
-        } catch (NoSuchAlgorithmException e) {
+        } catch (final NoSuchAlgorithmException e) {
         	throw new IOException(e);
 		}
     }
 
     @Override
-    public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public boolean handles(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         //if subject is not null and not a range (if predicate is null then object must be null)
         return (subject != null && !(subject instanceof RyaURIRange)) && !((predicate == null || predicate instanceof RyaURIRange) && (object != null));
     }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
index 013dd8b..98861c0 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/OspWholeRowTriplePatternStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.query.strategy.wholerow;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,13 +16,13 @@ package org.apache.rya.api.query.strategy.wholerow;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.api.query.strategy.wholerow;
 
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
@@ -53,33 +51,35 @@ public class OspWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
 
     @Override
     public Map.Entry<TABLE_LAYOUT,
-            ByteRange> defineRange(RyaURI subject, RyaURI predicate, RyaType object,
-                                   RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException {
+            ByteRange> defineRange(final RyaURI subject, final RyaURI predicate, final RyaType object,
+                                   final RyaURI context, final RdfCloudTripleStoreConfiguration conf) throws IOException {
         try {
             //os(ng)
             //o_r(s)(ng)
             //o(ng)
             //r(o)
-            if (!handles(subject, predicate, object, context)) return null;
+            if (!handles(subject, predicate, object, context)) {
+                return null;
+            }
 
-            RyaContext ryaContext = RyaContext.getInstance();
+            final RyaContext ryaContext = RyaContext.getInstance();
 
-            TABLE_LAYOUT table_layout = TABLE_LAYOUT.OSP;
+            final TABLE_LAYOUT table_layout = TABLE_LAYOUT.OSP;
             byte[] start, stop;
             if (subject != null) {
                 if (subject instanceof RyaRange) {
                     //o_r(s)
                     RyaRange ru = (RyaRange) subject;
                     ru = ryaContext.transformRange(ru);
-                    byte[] subjStartBytes = ru.getStart().getData().getBytes();
-                    byte[] subjEndBytes = ru.getStop().getData().getBytes();
-                    byte[] objBytes = ryaContext.serializeType(object)[0];
+                    final byte[] subjStartBytes = ru.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] subjEndBytes = ru.getStop().getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] objBytes = ryaContext.serializeType(object)[0];
                     start = Bytes.concat(objBytes, DELIM_BYTES, subjStartBytes);
                     stop = Bytes.concat(objBytes, DELIM_BYTES, subjEndBytes, DELIM_BYTES, LAST_BYTES);
                 } else {
                     //os
-                    byte[] objBytes = ryaContext.serializeType(object)[0];
-                    start = Bytes.concat(objBytes, DELIM_BYTES, subject.getData().getBytes(), DELIM_BYTES);
+                    final byte[] objBytes = ryaContext.serializeType(object)[0];
+                    start = Bytes.concat(objBytes, DELIM_BYTES, subject.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES);
                     stop = Bytes.concat(start, LAST_BYTES);
                 }
             } else {
@@ -97,13 +97,13 @@ public class OspWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
             }
             return new RdfCloudTripleStoreUtils.CustomEntry<TABLE_LAYOUT,
                     ByteRange>(table_layout, new ByteRange(start, stop));
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new IOException(e);
         }
     }
 
     @Override
-    public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public boolean handles(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         //os(ng)
         //o_r(s)(ng)
         //o(ng)

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
index 2667cb7..e09e5d0 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/PoWholeRowTriplePatternStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.query.strategy.wholerow;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,10 +16,16 @@ package org.apache.rya.api.query.strategy.wholerow;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.query.strategy.wholerow;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
 
-import com.google.common.primitives.Bytes;
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
 import org.apache.rya.api.RdfCloudTripleStoreConstants;
 import org.apache.rya.api.RdfCloudTripleStoreUtils;
@@ -33,10 +37,7 @@ import org.apache.rya.api.query.strategy.ByteRange;
 import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 
-import java.io.IOException;
-import java.util.Map;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.*;
+import com.google.common.primitives.Bytes;
 
 /**
  * Date: 7/14/12
@@ -51,28 +52,30 @@ public class PoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrate
 
     @Override
     public Map.Entry<RdfCloudTripleStoreConstants.TABLE_LAYOUT,
-            ByteRange> defineRange(RyaURI subject, RyaURI predicate, RyaType object,
-                                   RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException {
+            ByteRange> defineRange(final RyaURI subject, final RyaURI predicate, final RyaType object,
+                                   final RyaURI context, final RdfCloudTripleStoreConfiguration conf) throws IOException {
         try {
             //po(ng)
             //po_r(s)(ng)
             //p(ng)
             //p_r(o)(ng)
             //r(p)(ng)
-            if (!handles(subject, predicate, object, context)) return null;
+            if (!handles(subject, predicate, object, context)) {
+                return null;
+            }
 
-            RyaContext ryaContext = RyaContext.getInstance();
+            final RyaContext ryaContext = RyaContext.getInstance();
 
-            RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO;
+            final RdfCloudTripleStoreConstants.TABLE_LAYOUT table_layout = RdfCloudTripleStoreConstants.TABLE_LAYOUT.PO;
             byte[] start, stop;
             if (object != null) {
                 if (object instanceof RyaRange) {
                     //p_r(o)
                     RyaRange rv = (RyaRange) object;
                     rv = ryaContext.transformRange(rv);
-                    byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
-                    byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
-                    byte[] predBytes = predicate.getData().getBytes();
+                    final byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
+                    final byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
+                    final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
                     start = Bytes.concat(predBytes, DELIM_BYTES, objStartBytes);
                     stop = Bytes.concat(predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES);
                 } else {
@@ -80,17 +83,17 @@ public class PoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrate
                         //po_r(s)
                         RyaRange ru = (RyaRange) subject;
                         ru = ryaContext.transformRange(ru);
-                        byte[] subjStartBytes = ru.getStart().getData().getBytes();
-                        byte[] subjStopBytes = ru.getStop().getData().getBytes();
-                        byte[] predBytes = predicate.getData().getBytes();
-                        byte[] objBytes = ryaContext.serializeType(object)[0];
+                        final byte[] subjStartBytes = ru.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] subjStopBytes = ru.getStop().getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] objBytes = ryaContext.serializeType(object)[0];
                         start = Bytes.concat(predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStartBytes);
                         stop = Bytes.concat(predBytes, DELIM_BYTES, objBytes, DELIM_BYTES, subjStopBytes, TYPE_DELIM_BYTES, LAST_BYTES);
                     } else {
                         //po
                         //TODO: There must be a better way than creating multiple byte[]
-                        byte[] objBytes = ryaContext.serializeType(object)[0];
-                        start = Bytes.concat(predicate.getData().getBytes(), DELIM_BYTES, objBytes, DELIM_BYTES);
+                        final byte[] objBytes = ryaContext.serializeType(object)[0];
+                        start = Bytes.concat(predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, objBytes, DELIM_BYTES);
                         stop = Bytes.concat(start, LAST_BYTES);
                     }
                 }
@@ -98,31 +101,36 @@ public class PoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrate
                 //r(p)
                 RyaRange rv = (RyaRange) predicate;
                 rv = ryaContext.transformRange(rv);
-                start = rv.getStart().getData().getBytes();
-                stop = Bytes.concat(rv.getStop().getData().getBytes(), DELIM_BYTES, LAST_BYTES);
+                start = rv.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                stop = Bytes.concat(rv.getStop().getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, LAST_BYTES);
             } else {
                 //p
-                start = Bytes.concat(predicate.getData().getBytes(), DELIM_BYTES);
+                start = Bytes.concat(predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES);
                 stop = Bytes.concat(start, LAST_BYTES);
             }
             return new RdfCloudTripleStoreUtils.CustomEntry<RdfCloudTripleStoreConstants.TABLE_LAYOUT,
                     ByteRange>(table_layout, new ByteRange(start, stop));
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new IOException(e);
         }
     }
 
     @Override
-    public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public boolean handles(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         //po(ng)
         //p_r(o)(ng)
         //po_r(s)(ng)
         //p(ng)
         //r(p)(ng)
-        if (predicate == null) return false;
-        if (subject != null && !(subject instanceof RyaRange)) return false;
-        if (predicate instanceof RyaRange)
+        if (predicate == null) {
+            return false;
+        }
+        if (subject != null && !(subject instanceof RyaRange)) {
+            return false;
+        }
+        if (predicate instanceof RyaRange) {
             return object == null && subject == null;
+        }
         return subject == null || object != null;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
index dc83210..b2c29bb 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/query/strategy/wholerow/SpoWholeRowTriplePatternStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.query.strategy.wholerow;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.query.strategy.wholerow;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,11 +16,18 @@ package org.apache.rya.api.query.strategy.wholerow;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.query.strategy.wholerow;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.LAST_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
 
-import com.google.common.primitives.Bytes;
 import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
 import org.apache.rya.api.RdfCloudTripleStoreUtils;
 import org.apache.rya.api.domain.RyaRange;
 import org.apache.rya.api.domain.RyaType;
@@ -33,10 +38,7 @@ import org.apache.rya.api.query.strategy.ByteRange;
 import org.apache.rya.api.resolver.RyaContext;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 
-import java.io.IOException;
-import java.util.Map;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.*;
+import com.google.common.primitives.Bytes;
 
 /**
  * Date: 7/14/12
@@ -50,19 +52,21 @@ public class SpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
     }
 
     @Override
-    public Map.Entry<TABLE_LAYOUT, ByteRange> defineRange(RyaURI subject, RyaURI predicate, RyaType object,
-                                                          RyaURI context, RdfCloudTripleStoreConfiguration conf) throws IOException {
+    public Map.Entry<TABLE_LAYOUT, ByteRange> defineRange(final RyaURI subject, final RyaURI predicate, final RyaType object,
+                                                          final RyaURI context, final RdfCloudTripleStoreConfiguration conf) throws IOException {
         try {
             //spo(ng)
             //sp(ng)
             //s(ng)
             //sp_r(o)(ng)
             //s_r(p)(ng)
-            if (!handles(subject, predicate, object, context)) return null;
+            if (!handles(subject, predicate, object, context)) {
+                return null;
+            }
 
-            RyaContext ryaContext = RyaContext.getInstance();
+            final RyaContext ryaContext = RyaContext.getInstance();
 
-            TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO;
+            final TABLE_LAYOUT table_layout = TABLE_LAYOUT.SPO;
             byte[] start;
             byte[] stop;
             if (predicate != null) {
@@ -72,18 +76,18 @@ public class SpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
                         //range = sp_r(o.s)->sp_r(o.e) (remove last byte to remove type info)
                         RyaRange rv = (RyaRange) object;
                         rv = ryaContext.transformRange(rv);
-                        byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
-                        byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
-                        byte[] subjBytes = subject.getData().getBytes();
-                        byte[] predBytes = predicate.getData().getBytes();
+                        final byte[] objStartBytes = ryaContext.serializeType(rv.getStart())[0];
+                        final byte[] objEndBytes = ryaContext.serializeType(rv.getStop())[0];
+                        final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
                         start = Bytes.concat(subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objStartBytes);
                         stop = Bytes.concat(subjBytes, DELIM_BYTES, predBytes, DELIM_BYTES, objEndBytes, DELIM_BYTES, LAST_BYTES);
                     } else {
                         //spo
                         //range = spo->spo (remove last byte to remove type info)
                         //TODO: There must be a better way than creating multiple byte[]
-                        byte[] objBytes = ryaContext.serializeType(object)[0];
-                        start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES);
+                        final byte[] objBytes = ryaContext.serializeType(object)[0];
+                        start = Bytes.concat(subject.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, objBytes, TYPE_DELIM_BYTES);
                         stop = Bytes.concat(start, LAST_BYTES);
                     }
                 } else if (predicate instanceof RyaRange) {
@@ -91,15 +95,15 @@ public class SpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
                     //range = s_r(p.s)->s_r(p.e)
                     RyaRange rv = (RyaRange) predicate;
                     rv = ryaContext.transformRange(rv);
-                    byte[] subjBytes = subject.getData().getBytes();
-                    byte[] predStartBytes = rv.getStart().getData().getBytes();
-                    byte[] predStopBytes = rv.getStop().getData().getBytes();
+                    final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] predStartBytes = rv.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                    final byte[] predStopBytes = rv.getStop().getData().getBytes(StandardCharsets.UTF_8);
                     start = Bytes.concat(subjBytes, DELIM_BYTES, predStartBytes);
                     stop = Bytes.concat(subjBytes, DELIM_BYTES, predStopBytes, DELIM_BYTES, LAST_BYTES);
                 } else {
                     //sp
                     //range = sp
-                    start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES, predicate.getData().getBytes(), DELIM_BYTES);
+                    start = Bytes.concat(subject.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, predicate.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES);
                     stop = Bytes.concat(start, LAST_BYTES);
                 }
             } else if (subject instanceof RyaRange) {
@@ -107,23 +111,23 @@ public class SpoWholeRowTriplePatternStrategy extends AbstractTriplePatternStrat
                 //range = r(s.s) -> r(s.e)
                 RyaRange ru = (RyaRange) subject;
                 ru = ryaContext.transformRange(ru);
-                start = ru.getStart().getData().getBytes();
-                stop = Bytes.concat(ru.getStop().getData().getBytes(), DELIM_BYTES, LAST_BYTES);
+                start = ru.getStart().getData().getBytes(StandardCharsets.UTF_8);
+                stop = Bytes.concat(ru.getStop().getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES, LAST_BYTES);
             } else {
                 //s
                 //range = s
-                start = Bytes.concat(subject.getData().getBytes(), DELIM_BYTES);
+                start = Bytes.concat(subject.getData().getBytes(StandardCharsets.UTF_8), DELIM_BYTES);
                 stop = Bytes.concat(start, LAST_BYTES);
             }
             return new RdfCloudTripleStoreUtils.CustomEntry<TABLE_LAYOUT, ByteRange>(table_layout,
                     new ByteRange(start, stop));
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new IOException(e);
         }
     }
 
     @Override
-    public boolean handles(RyaURI subject, RyaURI predicate, RyaType object, RyaURI context) {
+    public boolean handles(final RyaURI subject, final RyaURI predicate, final RyaType object, final RyaURI context) {
         //if subject is not null and (if predicate is null then object must be null)
         return (subject != null && !(subject instanceof RyaURIRange && predicate != null)) && !((predicate == null || predicate instanceof RyaURIRange) && (object != null));
     }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/CustomDatatypeResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/CustomDatatypeResolver.java b/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/CustomDatatypeResolver.java
index 2c4b689..3b4dcfa 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/CustomDatatypeResolver.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/CustomDatatypeResolver.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.resolver.impl;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.resolver.impl;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,15 +16,18 @@ package org.apache.rya.api.resolver.impl;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.resolver.impl;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
+import java.nio.charset.StandardCharsets;
 
-import com.google.common.primitives.Bytes;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.resolver.RyaTypeResolverException;
 import org.openrdf.model.impl.URIImpl;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
+
+import com.google.common.primitives.Bytes;
 
 /**
  * Date: 7/16/12
@@ -40,30 +41,30 @@ public class CustomDatatypeResolver extends RyaTypeResolverImpl {
     }
 
     @Override
-    public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException {
-        byte[] bytes = serializeData(ryaType.getData()).getBytes();
-        return new byte[][]{bytes, Bytes.concat(TYPE_DELIM_BYTES, ryaType.getDataType().stringValue().getBytes(), TYPE_DELIM_BYTES, markerBytes)};
+    public byte[][] serializeType(final RyaType ryaType) throws RyaTypeResolverException {
+        final byte[] bytes = serializeData(ryaType.getData()).getBytes(StandardCharsets.UTF_8);
+        return new byte[][]{bytes, Bytes.concat(TYPE_DELIM_BYTES, ryaType.getDataType().stringValue().getBytes(StandardCharsets.UTF_8), TYPE_DELIM_BYTES, markerBytes)};
     }
 
     @Override
-    public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException {
-        byte[][] bytes = serializeType(ryaType);
+    public byte[] serialize(final RyaType ryaType) throws RyaTypeResolverException {
+        final byte[][] bytes = serializeType(ryaType);
         return Bytes.concat(bytes[0], bytes[1]);
     }
 
     @Override
-    public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException {
+    public RyaType deserialize(final byte[] bytes) throws RyaTypeResolverException {
         if (!deserializable(bytes)) {
             throw new RyaTypeResolverException("Bytes not deserializable");
         }
-        RyaType rt = newInstance();
-        int length = bytes.length;
-        int indexOfType = Bytes.indexOf(bytes, TYPE_DELIM_BYTE);
+        final RyaType rt = newInstance();
+        final int length = bytes.length;
+        final int indexOfType = Bytes.indexOf(bytes, TYPE_DELIM_BYTE);
         if (indexOfType < 1) {
             throw new RyaTypeResolverException("Not a datatype literal");
         }
-        String label = deserializeData(new String(bytes, 0, indexOfType));
-        rt.setDataType(new URIImpl(new String(bytes, indexOfType + 1, (length - indexOfType) - 3)));
+        final String label = deserializeData(new String(bytes, 0, indexOfType, StandardCharsets.UTF_8));
+        rt.setDataType(new URIImpl(new String(bytes, indexOfType + 1, (length - indexOfType) - 3, StandardCharsets.UTF_8)));
         rt.setData(label);
         return rt;
     }

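Taken together, serializeType and deserialize above fix the wire layout of a custom-datatype literal as encodedData + TYPE_DELIM + datatypeURI + TYPE_DELIM + marker: deserialize splits the label off at the first TYPE_DELIM_BYTE, reads the datatype URI from the middle, and the (length - indexOfType) - 3 arithmetic accounts for the two delimiters plus the one-byte marker.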
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/RyaTypeResolverImpl.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/RyaTypeResolverImpl.java b/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/RyaTypeResolverImpl.java
index 3e6c6b4..943a1d3 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/RyaTypeResolverImpl.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/resolver/impl/RyaTypeResolverImpl.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.resolver.impl;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.resolver.impl;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,10 +16,13 @@ package org.apache.rya.api.resolver.impl;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.resolver.impl;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
+import java.nio.charset.StandardCharsets;
 
-import com.google.common.primitives.Bytes;
 import org.apache.rya.api.domain.RyaRange;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.resolver.RyaTypeResolver;
@@ -31,8 +32,7 @@ import org.calrissian.mango.types.TypeEncoder;
 import org.openrdf.model.URI;
 import org.openrdf.model.vocabulary.XMLSchema;
 
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
+import com.google.common.primitives.Bytes;
 
 /**
  * Date: 7/16/12
@@ -51,12 +51,12 @@ public class RyaTypeResolverImpl implements RyaTypeResolver {
         this((byte) PLAIN_LITERAL_MARKER, XMLSchema.STRING);
     }
 
-    public RyaTypeResolverImpl(byte markerByte, URI dataType) {
+    public RyaTypeResolverImpl(final byte markerByte, final URI dataType) {
         setMarkerByte(markerByte);
         setRyaDataType(dataType);
     }
 
-    public void setMarkerByte(byte markerByte) {
+    public void setMarkerByte(final byte markerByte) {
         this.markerByte = markerByte;
         this.markerBytes = new byte[]{markerByte};
     }
@@ -67,19 +67,19 @@ public class RyaTypeResolverImpl implements RyaTypeResolver {
     }
 
     @Override
-    public RyaRange transformRange(RyaRange ryaRange) throws RyaTypeResolverException {
+    public RyaRange transformRange(final RyaRange ryaRange) throws RyaTypeResolverException {
         return ryaRange;
     }
 
     @Override
-    public byte[] serialize(RyaType ryaType) throws RyaTypeResolverException {
-        byte[][] bytes = serializeType(ryaType);
+    public byte[] serialize(final RyaType ryaType) throws RyaTypeResolverException {
+        final byte[][] bytes = serializeType(ryaType);
         return Bytes.concat(bytes[0], bytes[1]);
     }
 
     @Override
-    public byte[][] serializeType(RyaType ryaType) throws RyaTypeResolverException {
-        byte[] bytes = serializeData(ryaType.getData()).getBytes();
+    public byte[][] serializeType(final RyaType ryaType) throws RyaTypeResolverException {
+        final byte[] bytes = serializeData(ryaType.getData()).getBytes(StandardCharsets.UTF_8);
         return new byte[][]{bytes, Bytes.concat(TYPE_DELIM_BYTES, markerBytes)};
     }
 
@@ -88,7 +88,7 @@ public class RyaTypeResolverImpl implements RyaTypeResolver {
         return dataType;
     }
 
-    public void setRyaDataType(URI dataType) {
+    public void setRyaDataType(final URI dataType) {
         this.dataType = dataType;
     }
 
@@ -98,27 +98,27 @@ public class RyaTypeResolverImpl implements RyaTypeResolver {
     }
 
     @Override
-    public boolean deserializable(byte[] bytes) {
+    public boolean deserializable(final byte[] bytes) {
         return bytes != null && bytes.length >= 2 && bytes[bytes.length - 1] == getMarkerByte() && bytes[bytes.length - 2] == TYPE_DELIM_BYTE;
     }
 
-    protected String serializeData(String data) throws RyaTypeResolverException {
+    protected String serializeData(final String data) throws RyaTypeResolverException {
         return STRING_TYPE_ENCODER.encode(data);
     }
 
     @Override
-    public RyaType deserialize(byte[] bytes) throws RyaTypeResolverException {
+    public RyaType deserialize(final byte[] bytes) throws RyaTypeResolverException {
         if (!deserializable(bytes)) {
             throw new RyaTypeResolverException("Bytes not deserializable");
         }
-        RyaType rt = newInstance();
+        final RyaType rt = newInstance();
         rt.setDataType(getRyaDataType());
-        String data = new String(bytes, 0, bytes.length - 2);
+        final String data = new String(bytes, 0, bytes.length - 2, StandardCharsets.UTF_8);
         rt.setData(deserializeData(data));
         return rt;
     }
 
-    protected String deserializeData(String data) throws RyaTypeResolverException {
+    protected String deserializeData(final String data) throws RyaTypeResolverException {
         return STRING_TYPE_ENCODER.decode(data);
     }
 }

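The recurring fix in RYA-401 is mechanical but matters for correctness: String.getBytes() and new String(byte[]) use the JVM's default charset, so two differently configured hosts can serialize the same literal to different row keys; pinning StandardCharsets.UTF_8 makes the serialization platform-independent. A minimal sketch of the failure mode (the sample string is illustrative):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class DefaultCharsetSketch {
        public static void main(final String[] args) {
            final String s = "caf\u00e9";
            // Whatever -Dfile.encoding / the platform default happens to be.
            final byte[] platformBytes = s.getBytes();
            // Always UTF-8, no matter where the code runs.
            final byte[] utf8Bytes = s.getBytes(StandardCharsets.UTF_8);
            // false on, e.g., a windows-1252 default; true on a UTF-8 default.
            System.out.println(Arrays.equals(platformBytes, utf8Bytes));
        }
    }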

[08/11] incubator-rya git commit: RYA-333 Added "-P coverage" to generate code coverage reports. Closes #241.

Posted by ca...@apache.org.
RYA-333 Added "-P coverage" to generate code coverage reports. Closes
#241.

Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/244b8703
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/244b8703
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/244b8703

Branch: refs/heads/master
Commit: 244b87037a1c96ca7cba6aec0d392a402ebd8340
Parents: 538cfcc
Author: jdasch <hc...@gmail.com>
Authored: Tue Oct 10 09:51:51 2017 -0400
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:55:03 2017 -0400

----------------------------------------------------------------------
 extras/shell/pom.xml | 22 ----------------------
 pom.xml              | 48 +++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 48 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/244b8703/extras/shell/pom.xml
----------------------------------------------------------------------
diff --git a/extras/shell/pom.xml b/extras/shell/pom.xml
index 058f780..a2aedbc 100644
--- a/extras/shell/pom.xml
+++ b/extras/shell/pom.xml
@@ -123,7 +123,6 @@
             <plugin>
                 <groupId>com.mycila</groupId>
                 <artifactId>license-maven-plugin</artifactId>
-                <version>2.6</version>
                 <configuration>
                     <!-- We use a custom Apache 2.0 license because we do not include a copyright section. -->
                     <header>src/main/resources/LICENSE.txt</header>
@@ -197,27 +196,6 @@
                     </execution>
                 </executions>
             </plugin>
-            
-            <!-- Generate Code Coverage report. -->
-            <plugin>
-                <groupId>org.jacoco</groupId>
-                <artifactId>jacoco-maven-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>default-prepare-agent</id>
-                        <goals>
-                            <goal>prepare-agent</goal>
-                        </goals>
-                    </execution>
-                    <execution>
-                        <id>default-report</id>
-                        <phase>prepare-package</phase>
-                        <goals>
-                            <goal>report</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
         </plugins>
     </build>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/244b8703/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a6415ad..e5c5f5e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -151,6 +151,54 @@ under the License.
                 <skip.rya.it>false</skip.rya.it>
             </properties>
         </profile>
+
+        <!-- Enable this profile to generate a Code Coverage report with jacoco "mvn ... -P coverage" -->
+        <profile>
+            <id>coverage</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.jacoco</groupId>
+                        <artifactId>jacoco-maven-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <id>default-prepare-agent</id>
+                                <goals>
+                                    <goal>prepare-agent</goal>
+                                </goals>
+                                <configuration>
+                                    <append>true</append>
+                                </configuration>
+                            </execution>
+                            <execution>
+                                <id>default-report</id>
+                                <phase>prepare-package</phase>
+                                <goals>
+                                    <goal>report</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+            <!--  Add a reporting section for usage of jacoco with the maven-site-plugin -->
+            <reporting>
+                <plugins>
+                    <plugin>
+                        <groupId>org.jacoco</groupId>
+                        <artifactId>jacoco-maven-plugin</artifactId>
+                        <reportSets>
+                            <reportSet>
+                                <reports>
+                                    <!-- select non-aggregate reports -->
+                                    <report>report</report>
+                                </reports>
+                            </reportSet>
+                        </reportSets>
+                    </plugin>
+                </plugins>
+            </reporting>
+        </profile>
     </profiles>
     
     <dependencyManagement>

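A typical invocation under the new profile (an illustrative command line, not
taken from the commit) looks like:

    mvn clean verify site -P coverage

The prepare-agent execution instruments the test runs, the report execution
writes each module's JaCoCo report during prepare-package, and the reporting
section wires that report into the maven-site-plugin output.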

[11/11] incubator-rya git commit: RYA-392-Datetime-Within. Closes #237.

Posted by ca...@apache.org.
RYA-392-Datetime-Within. Closes #237.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/29a8e6b7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/29a8e6b7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/29a8e6b7

Branch: refs/heads/master
Commit: 29a8e6b75c0ed903df2da8112f30f4941448582a
Parents: a3b2042
Author: Caleb Meier <ca...@parsons.com>
Authored: Wed Oct 4 12:06:55 2017 -0700
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:56:26 2017 -0400

----------------------------------------------------------------------
 .../rya/api/functions/DateTimeWithinPeriod.java | 130 ++++++++++++++
 .../org/apache/rya/api/functions/OWLTime.java   | 111 ++++++++++++
 ...f.query.algebra.evaluation.function.Function |  17 ++
 .../api/functions/DateTimeWithinPeriodTest.java | 180 +++++++++++++++++++
 .../pcj/fluo/app/FilterResultUpdater.java       |  14 +-
 .../pcj/fluo/app/util/FilterSerializer.java     |  27 ++-
 .../pcj/fluo/app/util/FilterSerializerTest.java |  48 +++++
 .../indexing/pcj/fluo/integration/QueryIT.java  |  89 +++++++++
 8 files changed, 611 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/common/rya.api/src/main/java/org/apache/rya/api/functions/DateTimeWithinPeriod.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/functions/DateTimeWithinPeriod.java b/common/rya.api/src/main/java/org/apache/rya/api/functions/DateTimeWithinPeriod.java
new file mode 100644
index 0000000..aedeea7
--- /dev/null
+++ b/common/rya.api/src/main/java/org/apache/rya/api/functions/DateTimeWithinPeriod.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */package org.apache.rya.api.functions;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.time.Duration;
+import java.time.Instant;
+
+import org.openrdf.model.Literal;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.datatypes.XMLDatatypeUtil;
+import org.openrdf.model.vocabulary.FN;
+import org.openrdf.model.vocabulary.XMLSchema;
+import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
+import org.openrdf.query.algebra.evaluation.function.Function;
+
+/**
+ * This {@link Function} determines whether two {@link XMLSchema#DATETIME}s occur within a specified period of time of
+ * one another. The method {@link Function#evaluate(ValueFactory, Value...)} expects four values, where the first two
+ * values are the datetimes, the third value is an integer indicating the period, and the fourth value is a URI
+ * indicating the time unit of the period. The URI must be of type DurationDescription in the OWL-Time ontology (see
+ * <a href ="https://www.w3.org/TR/owl-time/">https://www.w3.org/TR/owl-time/</a>). Examples of valid time unit URIs can
+ * be found in the class {@link OWLTime} and below
+ * <ul>
+ * <li>http://www.w3.org/2006/time#days</li>
+ * <li>http://www.w3.org/2006/time#hours</li>
+ * <li>http://www.w3.org/2006/time#minutes</li>
+ * <li>http://www.w3.org/2006/time#seconds</li>
+ * </ul>
+ *
+ */
+public class DateTimeWithinPeriod implements Function {
+
+    private static final String FUNCTION_URI = FN.NAMESPACE + "dateTimeWithin";
+
+    @Override
+    public String getURI() {
+        return FUNCTION_URI;
+    }
+
+    /**
+     * Determines whether two datetimes occur within a specified period of time of one another. This method expects four
+     * values, where the first two values are the datetimes, the third value is an integer indicating the period, and
+     * the fourth value is a URI indicating the time unit of the period. The URI must be of type DurationDescription in
+     * the OWL-Time ontology (see <a href ="https://www.w3.org/TR/owl-time/">https://www.w3.org/TR/owl-time/</a>).
+     * Examples of valid time unit URIs can be found in the class {@link OWLTime} and below
+     * <ul>
+     * <li>http://www.w3.org/2006/time#days</li>
+     * <li>http://www.w3.org/2006/time#hours</li>
+     * <li>http://www.w3.org/2006/time#minutes</li>
+     * <li>http://www.w3.org/2006/time#seconds</li>
+     * </ul>
+     *
+     * @param valueFactory - factory for creating values (not null)
+     * @param values - array of Value arguments for this Function (not null).
+     */
+    @Override
+    public Value evaluate(ValueFactory valueFactory, Value... values) throws ValueExprEvaluationException {
+        checkNotNull(valueFactory);
+        checkNotNull(values);
+        try {
+            // general validation of input
+            checkArgument(values.length == 4);
+            checkArgument(values[0] instanceof Literal);
+            checkArgument(values[1] instanceof Literal);
+            checkArgument(values[2] instanceof Literal);
+            checkArgument(values[3] instanceof URI);
+
+            Instant dateTime1 = convertToInstant((Literal) values[0]);
+            Instant dateTime2 = convertToInstant((Literal) values[1]);
+            long periodMillis = convertPeriodToMillis((Literal) values[2], (URI) values[3]);
+            long timeBetween = Math.abs(Duration.between(dateTime1, dateTime2).toMillis());
+
+            return valueFactory.createLiteral(timeBetween < periodMillis);
+        } catch (Exception e) {
+            throw new ValueExprEvaluationException(e);
+        }
+    }
+
+    private Instant convertToInstant(Literal literal) {
+        String stringVal = literal.getLabel();
+        URI dataType = literal.getDatatype();
+        checkArgument(dataType.equals(XMLSchema.DATETIME) || dataType.equals(XMLSchema.DATE),
+                String.format("Invalid data type for date time. Data Type must be of type %s or %s .", XMLSchema.DATETIME, XMLSchema.DATE));
+        checkArgument(XMLDatatypeUtil.isValidDateTime(stringVal) || XMLDatatypeUtil.isValidDate(stringVal), "Invalid date time value.");
+        return literal.calendarValue().toGregorianCalendar().toInstant();
+    }
+
+    private long convertPeriodToMillis(Literal literal, URI unit) {
+        String stringVal = literal.getLabel();
+        URI dataType = literal.getDatatype();
+        checkArgument(dataType.equals(XMLSchema.INTEGER) || dataType.equals(XMLSchema.INT), String
+                .format("Invalid data type for period duration. Data Type must be of type %s or %s .", XMLSchema.INTEGER, XMLSchema.INT));
+        checkArgument(XMLDatatypeUtil.isValidInteger(stringVal) || XMLDatatypeUtil.isValidInt(stringVal), "Invalid duration value.");
+        return convertToMillis(Integer.parseInt(stringVal), unit);
+    }
+
+    /**
+     * Converts the period duration to milliseconds.
+     *
+     * @param duration - duration of temporal period
+     * @param unit - URI indicating the time unit (URI must be of type DurationDescription in the OWL-Time ontology
+     *            indicated by the namespace <http://www.w3.org/2006/time#>)
+     * @return - duration in milliseconds
+     */
+    private long convertToMillis(int duration, URI unit) {
+        checkArgument(duration > 0);
+        return OWLTime.getMillis(duration, unit);
+    }
+
+}

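A minimal usage sketch for the new function (illustrative values; checked
exceptions omitted), mirroring the unit tests further below:

    import javax.xml.datatype.DatatypeFactory;
    import org.apache.rya.api.functions.DateTimeWithinPeriod;
    import org.apache.rya.api.functions.OWLTime;
    import org.openrdf.model.Literal;
    import org.openrdf.model.Value;
    import org.openrdf.model.ValueFactory;
    import org.openrdf.model.impl.ValueFactoryImpl;

    ValueFactory vf = new ValueFactoryImpl();
    DatatypeFactory dtf = DatatypeFactory.newInstance();

    // Two xsd:dateTime literals 30 minutes apart.
    Literal t1 = vf.createLiteral(dtf.newXMLGregorianCalendar("2017-10-12T12:00:00Z"));
    Literal t2 = vf.createLiteral(dtf.newXMLGregorianCalendar("2017-10-12T12:30:00Z"));

    // Evaluates to a true literal: the datetimes fall within one hour of each other.
    Value result = new DateTimeWithinPeriod().evaluate(
            vf, t1, t2, vf.createLiteral(1), OWLTime.HOURS_URI);
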
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/common/rya.api/src/main/java/org/apache/rya/api/functions/OWLTime.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/functions/OWLTime.java b/common/rya.api/src/main/java/org/apache/rya/api/functions/OWLTime.java
new file mode 100644
index 0000000..5ffc4ee
--- /dev/null
+++ b/common/rya.api/src/main/java/org/apache/rya/api/functions/OWLTime.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.api.functions;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import java.time.temporal.ChronoUnit;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import org.openrdf.model.URI;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+
+/**
+ * Constants for OWL-Time primitives in the OWL-Time namespace.
+ *
+ */
+public class OWLTime {
+
+    private static final ValueFactory FACTORY = ValueFactoryImpl.getInstance();
+
+    /**
+     * Indicates namespace of OWL-Time ontology
+     */
+    public static final String NAMESPACE = "http://www.w3.org/2006/time#";
+    /**
+     * Seconds class of type DurationDescription in OWL-Time ontology
+     */
+    public static final URI SECONDS_URI = FACTORY.createURI(NAMESPACE, "seconds");
+    /**
+     * Minutes class of type DurationDescription in OWL-Time ontology
+     */
+    public static final URI MINUTES_URI = FACTORY.createURI(NAMESPACE, "minutes");
+    /**
+     * Hours class of type DurationDescription in OWL-Time ontology
+     */
+    public static final URI HOURS_URI = FACTORY.createURI(NAMESPACE, "hours");
+    /**
+     * Days class of type DurationDescription in OWL-Time ontology
+     */
+    public static final URI DAYS_URI = FACTORY.createURI(NAMESPACE, "days");
+    /**
+     * Weeks class of type DurationDescription in OWL-Time ontology
+     */
+    public static final URI WEEKS_URI = FACTORY.createURI(NAMESPACE, "weeks");
+
+    private static final Map<URI, ChronoUnit> DURATION_MAP = new HashMap<>();
+
+    static {
+        DURATION_MAP.put(SECONDS_URI, ChronoUnit.SECONDS);
+        DURATION_MAP.put(MINUTES_URI, ChronoUnit.MINUTES);
+        DURATION_MAP.put(HOURS_URI, ChronoUnit.HOURS);
+        DURATION_MAP.put(DAYS_URI, ChronoUnit.DAYS);
+        DURATION_MAP.put(WEEKS_URI, ChronoUnit.WEEKS);
+    }
+
+    /**
+     * Verifies whether URI is a valid OWL-Time URI that is supported by this class.
+     * @param durationURI - OWLTime URI indicating the time unit (not null)
+     * @return - {@code true} if this URI indicates a supported OWLTime time unit
+     */
+    public static boolean isValidDurationType(URI durationURI) {
+        checkNotNull(durationURI);
+        return DURATION_MAP.containsKey(durationURI);
+    }
+
+    /**
+     * Returns the duration in milliseconds
+     *
+     * @param duration - amount of time in the units indicated by the provided {@link OWLTime} URI
+     * @param uri - OWLTime URI indicating the time unit of duration (not null)
+     * @return - the amount of time in milliseconds
+     * @throws IllegalArgumentException if provided {@link URI} is not a valid, supported OWL-Time time unit.
+     */
+    public static long getMillis(int duration, URI uri) throws IllegalArgumentException {
+        Optional<ChronoUnit> unit = getChronoUnitFromURI(uri);
+        checkArgument(unit.isPresent(),
+                String.format("URI %s does not indicate a valid OWLTime time unit.  URI must of be of type %s, %s, %s, %s, or %s .", uri,
+                        SECONDS_URI, MINUTES_URI, HOURS_URI, DAYS_URI, WEEKS_URI));
+        return duration * unit.get().getDuration().toMillis();
+    }
+
+    /**
+     * Converts the {@link OWLTime} URI time unit to a {@link ChronoUnit} time unit
+     *
+     * @param durationURI - OWLTime time unit URI (not null)
+     * @return - corresponding ChronoUnit time unit
+     */
+    public static Optional<ChronoUnit> getChronoUnitFromURI(URI durationURI) {
+        return Optional.ofNullable(DURATION_MAP.get(durationURI));
+    }
+}

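For reference, a quick sketch of the conversion helpers (2 hours is
7,200,000 ms, since ChronoUnit.HOURS is 3,600,000 ms):

    // 2 hours -> 7,200,000 milliseconds.
    long millis = OWLTime.getMillis(2, OWLTime.HOURS_URI);

    // Unsupported unit URIs are rejected before conversion.
    boolean supported = OWLTime.isValidDurationType(OWLTime.WEEKS_URI);   // true
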
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/common/rya.api/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function b/common/rya.api/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
new file mode 100644
index 0000000..104a13c
--- /dev/null
+++ b/common/rya.api/src/main/resources/META-INF/services/org.openrdf.query.algebra.evaluation.function.Function
@@ -0,0 +1,17 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements.  See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership.  The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License.  You may obtain a copy of the License at
+ #
+ #   http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied.  See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+org.apache.rya.api.functions.DateTimeWithinPeriod
\ No newline at end of file

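This service entry lets Sesame's FunctionRegistry discover DateTimeWithinPeriod
from the classpath. Where that scan is not in effect, the function can be
registered by hand, as the QueryIT changes below do:

    // Explicit registration; the META-INF/services entry makes this automatic.
    FunctionRegistry.getInstance().add(new DateTimeWithinPeriod());
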
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/common/rya.api/src/test/java/org/apache/rya/api/functions/DateTimeWithinPeriodTest.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/test/java/org/apache/rya/api/functions/DateTimeWithinPeriodTest.java b/common/rya.api/src/test/java/org/apache/rya/api/functions/DateTimeWithinPeriodTest.java
new file mode 100644
index 0000000..0fb0f2a
--- /dev/null
+++ b/common/rya.api/src/test/java/org/apache/rya/api/functions/DateTimeWithinPeriodTest.java
@@ -0,0 +1,180 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.api.functions;
+
+import static org.junit.Assert.assertEquals;
+
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+
+import javax.xml.datatype.DatatypeConfigurationException;
+import javax.xml.datatype.DatatypeFactory;
+
+import org.junit.Test;
+import org.openrdf.model.Literal;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
+
+public class DateTimeWithinPeriodTest {
+
+    private static final ValueFactory vf = new ValueFactoryImpl();
+    private static final Literal TRUE = vf.createLiteral(true);
+    private static final Literal FALSE = vf.createLiteral(false);
+
+    @Test
+    public void testSeconds() throws DatatypeConfigurationException, ValueExprEvaluationException {
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusSeconds(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal now = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, now, now, vf.createLiteral(1), OWLTime.SECONDS_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(1), OWLTime.SECONDS_URI));
+        assertEquals(TRUE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(2), OWLTime.SECONDS_URI));
+    }
+
+    @Test
+    public void testMinutes() throws DatatypeConfigurationException, ValueExprEvaluationException {
+
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusMinutes(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal now = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, now, now,vf.createLiteral(1),OWLTime.MINUTES_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(1),OWLTime.MINUTES_URI));
+        assertEquals(TRUE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(2),OWLTime.MINUTES_URI));
+    }
+
+
+    @Test
+    public void testHours() throws DatatypeConfigurationException, ValueExprEvaluationException {
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusHours(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal now = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, now, now,vf.createLiteral(1),OWLTime.HOURS_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(1),OWLTime.HOURS_URI));
+        assertEquals(TRUE, func.evaluate(vf, now, nowMinusOne,vf.createLiteral(2),OWLTime.HOURS_URI));
+    }
+
+
+    @Test
+    public void testDays() throws DatatypeConfigurationException, ValueExprEvaluationException {
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusDays(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal now = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, now, now, vf.createLiteral(1), OWLTime.DAYS_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusOne, vf.createLiteral(1), OWLTime.DAYS_URI));
+        assertEquals(TRUE, func.evaluate(vf, now, nowMinusOne, vf.createLiteral(2), OWLTime.DAYS_URI));
+    }
+
+    @Test
+    public void testWeeks() throws DatatypeConfigurationException, ValueExprEvaluationException {
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusWeeks(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime2 = zTime.minusWeeks(7);
+        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal now = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+        Literal nowMinusSeven = vf.createLiteral(dtf.newXMLGregorianCalendar(time2));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, now, now, vf.createLiteral(1), OWLTime.WEEKS_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusOne, vf.createLiteral(1), OWLTime.WEEKS_URI));
+        assertEquals(TRUE, func.evaluate(vf, now, nowMinusOne, vf.createLiteral(2), OWLTime.WEEKS_URI));
+        assertEquals(FALSE, func.evaluate(vf, now, nowMinusSeven, vf.createLiteral(7), OWLTime.WEEKS_URI));
+    }
+
+    @Test
+    public void testTimeZone() throws DatatypeConfigurationException, ValueExprEvaluationException {
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+
+        ZonedDateTime now = ZonedDateTime.now();
+        String time = now.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = now.withZoneSameInstant(ZoneId.of("Europe/London"));
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime2 = now.withZoneSameInstant(ZoneId.of("Australia/Sydney"));
+        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime3 = now.minusDays(1).withZoneSameInstant(ZoneId.of("Asia/Seoul"));
+        String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal nowLocal = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal nowEuropeTZ = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+        Literal nowAustraliaTZ = vf.createLiteral(dtf.newXMLGregorianCalendar(time2));
+        Literal nowAsiaTZMinusOne = vf.createLiteral(dtf.newXMLGregorianCalendar(time3));
+
+        DateTimeWithinPeriod func = new DateTimeWithinPeriod();
+
+        assertEquals(TRUE, func.evaluate(vf, nowLocal, nowEuropeTZ, vf.createLiteral(1), OWLTime.SECONDS_URI));
+        assertEquals(TRUE, func.evaluate(vf, nowLocal, nowAustraliaTZ, vf.createLiteral(1), OWLTime.SECONDS_URI));
+        assertEquals(FALSE, func.evaluate(vf, nowLocal, nowAsiaTZMinusOne, vf.createLiteral(1), OWLTime.DAYS_URI));
+        assertEquals(TRUE, func.evaluate(vf, nowLocal, nowAsiaTZMinusOne, vf.createLiteral(2), OWLTime.DAYS_URI));
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/FilterResultUpdater.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/FilterResultUpdater.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/FilterResultUpdater.java
index 7cfa216..17ed158 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/FilterResultUpdater.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/FilterResultUpdater.java
@@ -39,9 +39,11 @@ import org.openrdf.model.impl.ValueFactoryImpl;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.QueryEvaluationException;
 import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.FunctionCall;
 import org.openrdf.query.algebra.ValueExpr;
 import org.openrdf.query.algebra.evaluation.TripleSource;
 import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
+import org.openrdf.query.algebra.evaluation.function.FunctionRegistry;
 import org.openrdf.query.algebra.evaluation.impl.EvaluationStrategyImpl;
 import org.openrdf.query.algebra.evaluation.util.QueryEvaluationUtil;
 
@@ -130,16 +132,20 @@ public class FilterResultUpdater {
      * @param condition - The filter condition. (not null)
      * @param bindings - The binding set to evaluate. (not null)
      * @return {@code true} if the binding set is accepted by the filter; otherwise {@code false}.
-     * @throws QueryEvaluationException The condition couldn't be evaluated.
+     * @throws QueryEvaluationException The condition couldn't be evaluated. In the case that the ValueExpr is a
+     *             {@link FunctionCall}, this Exception is thrown because the Function could not be found in the
+     *             {@link FunctionRegistry}.
      */
     private static boolean isTrue(final ValueExpr condition, final BindingSet bindings) throws QueryEvaluationException {
         try {
             final Value value = evaluator.evaluate(condition, bindings);
             return QueryEvaluationUtil.getEffectiveBooleanValue(value);
         } catch (final ValueExprEvaluationException e) {
-            // XXX Hack: If filtering a statement that does not have the right bindings, return true.
-            //           When would this ever come up? Should we actually return true?
-            return true;
+            //False is returned because, for whatever reason, the ValueExpr could not be evaluated.
+            //In the event that the ValueExpr is a FunctionCall, this Exception will be generated if
+            //the Function URI is a valid URI that was found in the FunctionRegistry, but the arguments
+            //for that Function could not be parsed.
+            return false;
         }
     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializer.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializer.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializer.java
index 73f3447..6c99809 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializer.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializer.java
@@ -23,6 +23,8 @@ import java.util.Set;
 
 import org.openrdf.query.algebra.Filter;
 import org.openrdf.query.algebra.SingletonSet;
+import org.openrdf.query.algebra.evaluation.function.Function;
+import org.openrdf.query.algebra.evaluation.function.FunctionRegistry;
 import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
 import org.openrdf.query.parser.ParsedQuery;
 import org.openrdf.query.parser.ParsedTupleQuery;
@@ -52,7 +54,7 @@ public class FilterSerializer {
         Filter clone = filter.clone();
         clone.setArg(new SingletonSet());
         try {
-            return renderer.render(new ParsedTupleQuery(clone));
+            return removeAngularBracketsFromNonUriFunctions(renderer.render(new ParsedTupleQuery(clone)));
         } catch (Exception e) {
             throw new FilterParseException("Unable to parse Filter.", e);
         }
@@ -98,6 +100,29 @@ public class FilterSerializer {
         public void meet(Filter node) {
             filters.add(node);
         }
+        
+    }
+    
+    /**
+     * There are a number of Functions in the FunctionRegistry whose getURI() method does not return a valid URI (NOW()
+     * is one such Function). The SPARQLQueryRenderer adds angular brackets to the result returned by
+     * {@link Function#getURI()} by default, which leads to a MalformedQueryException when the SPARQLParser attempts to
+     * parse the SPARQL created by the renderer. Therefore, a call to serialize and then deserialize for a Filter
+     * containing a Function that returns an invalid URI will generate an exception. This method removes the angular
+     * brackets from the result returned by {@link Function#getURI()} if it is not a valid URI so that the parser will
+     * parse it.
+     * 
+     * @param query - query generated by query renderer
+     * @return - String with angular brackets removed from around all invalid Function URIs
+     */
+    private static String removeAngularBracketsFromNonUriFunctions(String query) {
+        FunctionRegistry registry = FunctionRegistry.getInstance();
+        for(String key: registry.getKeys()) {
+            if (key.indexOf(':') < 0) {
+                query = query.replace("<"+key.trim()+">", key);
+            }
+        }
+        return query;
     }
     
     public static class FilterParseException extends Exception {

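To illustrate the workaround (assuming NOW() is registered under the bare key
"NOW", which contains no ':' and is therefore not a valid URI):

    // Rendered by the SPARQL renderer (unparseable): ... FILTER(<NOW>()) ...
    // After the replace (parseable):                 ... FILTER(NOW()) ...
    query = query.replace("<NOW>", "NOW");
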
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializerTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializerTest.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializerTest.java
new file mode 100644
index 0000000..b2efa96
--- /dev/null
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/test/java/org/apache/rya/indexing/pcj/fluo/app/util/FilterSerializerTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.indexing.pcj.fluo.app.util;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Test;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+public class FilterSerializerTest {
+
+    @Test
+    public void nowTest() throws Exception {
+
+        //tests to see if NOW function is correctly serialized and deserialized
+        //by FilterSerializer
+        String query = "select * {Filter(NOW())}";
+        SPARQLParser parser = new SPARQLParser();
+        ParsedQuery pq = parser.parseQuery(query, null);
+        Filter filter = (Filter) ((Projection) pq.getTupleExpr()).getArg();
+        String filterString = FilterSerializer.serialize(filter);
+        Filter deserializedFilter = FilterSerializer.deserialize(filterString);
+        
+        assertEquals(filter, deserializedFilter);
+
+    }
+    
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/29a8e6b7/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
index 4974aee..beaef32 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.integration/src/test/java/org/apache/rya/indexing/pcj/fluo/integration/QueryIT.java
@@ -37,6 +37,8 @@ import org.apache.fluo.core.client.FluoClientImpl;
 import org.apache.rya.api.client.CreatePCJ.ExportStrategy;
 import org.apache.rya.api.client.RyaClient;
 import org.apache.rya.api.client.accumulo.AccumuloRyaClientFactory;
+import org.apache.rya.api.functions.DateTimeWithinPeriod;
+import org.apache.rya.api.functions.OWLTime;
 import org.apache.rya.indexing.pcj.fluo.api.CreateFluoPcj;
 import org.apache.rya.indexing.pcj.fluo.app.query.UnsupportedQueryException;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
@@ -54,6 +56,7 @@ import org.openrdf.model.ValueFactory;
 import org.openrdf.model.datatypes.XMLDatatypeUtil;
 import org.openrdf.model.impl.BooleanLiteralImpl;
 import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.FN;
 import org.openrdf.model.vocabulary.XMLSchema;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.algebra.evaluation.ValueExprEvaluationException;
@@ -423,6 +426,92 @@ public class QueryIT extends RyaExportITBase {
         runTest(sparql, statements, expectedResults, ExportStrategy.RYA);
     }
 
+    
+    @Test
+    public void dateTimeWithin() throws Exception {
+        
+        final ValueFactory vf = new ValueFactoryImpl();
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+        FunctionRegistry.getInstance().add(new DateTimeWithinPeriod());
+
+        final String sparql = "PREFIX fn: <" + FN.NAMESPACE +">"
+                + "SELECT ?event ?startTime ?endTime WHERE { ?event <uri:startTime> ?startTime; <uri:endTime> ?endTime. "
+                + "FILTER(fn:dateTimeWithin(?startTime, ?endTime, 2,<" + OWLTime.HOURS_URI + "> ))}";
+        
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusHours(1);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+        
+        ZonedDateTime zTime2 = zTime.minusHours(2);
+        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+
+        Literal lit = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal lit1 = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+        Literal lit2 = vf.createLiteral(dtf.newXMLGregorianCalendar(time2));
+
+        // Create the Statements that will be loaded into Rya.
+        final Collection<Statement> statements = Sets.newHashSet(
+                vf.createStatement(vf.createURI("uri:event1"), vf.createURI("uri:startTime"), lit),
+                vf.createStatement(vf.createURI("uri:event1"), vf.createURI("uri:endTime"), lit1),
+                vf.createStatement(vf.createURI("uri:event2"), vf.createURI("uri:startTime"), lit),
+                vf.createStatement(vf.createURI("uri:event2"), vf.createURI("uri:endTime"), lit2)
+               );
+
+        // Create the expected results of the SPARQL query once the PCJ has been computed.
+        final Set<BindingSet> expectedResults = new HashSet<>();
+
+        MapBindingSet bs = new MapBindingSet();
+        bs.addBinding("event", vf.createURI("uri:event1"));
+        bs.addBinding("startTime", lit);
+        bs.addBinding("endTime", lit1);
+        expectedResults.add(bs);
+
+        // Verify the end results of the query match the expected results.
+        runTest(sparql, statements, expectedResults, ExportStrategy.RYA);
+    }
+    
+    @Test
+    public void dateTimeWithinNow() throws Exception {
+        
+        final ValueFactory vf = new ValueFactoryImpl();
+        DatatypeFactory dtf = DatatypeFactory.newInstance();
+        FunctionRegistry.getInstance().add(new DateTimeWithinPeriod());
+
+        final String sparql = "PREFIX fn: <" + FN.NAMESPACE +">"
+                + "SELECT ?event ?startTime WHERE { ?event <uri:startTime> ?startTime. "
+                + "FILTER(fn:dateTimeWithin(?startTime, NOW(), 15, <" + OWLTime.SECONDS_URI + "> ))}";
+        
+        ZonedDateTime zTime = ZonedDateTime.now();
+        String time = zTime.format(DateTimeFormatter.ISO_INSTANT);
+
+        ZonedDateTime zTime1 = zTime.minusSeconds(30);
+        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+        
+        Literal lit = vf.createLiteral(dtf.newXMLGregorianCalendar(time));
+        Literal lit1 = vf.createLiteral(dtf.newXMLGregorianCalendar(time1));
+
+        // Create the Statements that will be loaded into Rya.
+        final Collection<Statement> statements = Sets.newHashSet(
+                vf.createStatement(vf.createURI("uri:event1"), vf.createURI("uri:startTime"), lit),
+                vf.createStatement(vf.createURI("uri:event2"), vf.createURI("uri:startTime"), lit1)
+               );
+
+        // Create the expected results of the SPARQL query once the PCJ has been computed.
+        final Set<BindingSet> expectedResults = new HashSet<>();
+
+        MapBindingSet bs = new MapBindingSet();
+        bs.addBinding("event", vf.createURI("uri:event1"));
+        bs.addBinding("startTime", lit);
+        expectedResults.add(bs);
+
+        // Verify the end results of the query match the expected results.
+        runTest(sparql, statements, expectedResults, ExportStrategy.RYA);
+    }
+
+
+    
     @Test
     public void periodicQueryTestWithoutAggregation() throws Exception {
         String query = "prefix function: <http://org.apache.rya/function#> " // n



[10/11] incubator-rya git commit: RYA-398 Changed BeforeClass to Before. Closes #239.

Posted by ca...@apache.org.
RYA-398 Changed BeforeClass to Before. Closes #239.

If MongoEntityIndexTest gets run first, it will fail
since conf hasn't been initialized yet.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/a3b20420
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/a3b20420
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/a3b20420

Branch: refs/heads/master
Commit: a3b20420c68252f6e7b21db8959173b0779b1a7e
Parents: 26e9214
Author: Andrew Smith <sm...@gmail.com>
Authored: Thu Oct 5 16:26:25 2017 -0400
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:55:45 2017 -0400

----------------------------------------------------------------------
 .../org/apache/rya/indexing/mongo/MongoEntityIndexTest.java | 9 +++++----
 .../apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java | 7 -------
 2 files changed, 5 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/a3b20420/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoEntityIndexTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoEntityIndexTest.java b/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoEntityIndexTest.java
index 850640f..2fe0cb3 100644
--- a/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoEntityIndexTest.java
+++ b/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoEntityIndexTest.java
@@ -31,6 +31,7 @@ import org.apache.rya.indexing.entity.storage.EntityStorage;
 import org.apache.rya.indexing.entity.storage.TypeStorage;
 import org.apache.rya.mongodb.MongoTestBase;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.openrdf.model.vocabulary.RDF;
@@ -54,11 +55,11 @@ public class MongoEntityIndexTest extends MongoTestBase {
                     .build());
     private static final RyaURI RYA_PERSON_TYPE = new RyaURI("urn:person");
 
-    private static EntityIndexOptimizer optimizer;
-    private static EntityStorage entityStorage;
+    private EntityIndexOptimizer optimizer;
+    private EntityStorage entityStorage;
 
-    @BeforeClass
-    public static void beforeClass() throws Exception {
+    @Before
+    public void beforeClass() throws Exception {
         optimizer = new EntityIndexOptimizer();
         optimizer.setConf(conf);
 

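A compact sketch of the ordering problem this fixes (the test class name is
illustrative; per the commit message, MongoTestBase initializes conf in its
per-test setup):

    public class SomeMongoTest extends MongoTestBase {
        private EntityIndexOptimizer optimizer;

        // A static @BeforeClass method would run once, before MongoTestBase's
        // per-test setup has initialized conf -- so conf would still be null.

        @Before
        public void beforeClass() throws Exception {
            // Runs before each test, after the base class setup, so conf is ready.
            optimizer = new EntityIndexOptimizer();
            optimizer.setConf(conf);
        }
    }
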
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/a3b20420/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java b/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java
index 1c712ca..f111fd1 100644
--- a/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java
+++ b/extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoFreeTextIndexerTest.java
@@ -99,11 +99,7 @@ public class MongoFreeTextIndexerTest extends MongoTestBase {
             f.flush();
 
 
-            System.out.println("testDelete: BEFORE DELETE");
-
             f.deleteStatement(RdfToRyaConversions.convertStatement(statement1));
-            System.out.println("testDelete: AFTER FIRST DELETION");
-//            Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
             Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
 
             // Check that "new" didn't get deleted from the term table after "this is a new hat"
@@ -111,9 +107,6 @@ public class MongoFreeTextIndexerTest extends MongoTestBase {
             Assert.assertEquals(Sets.newHashSet(statement2), getSet(f.queryText("new", EMPTY_CONSTRAINTS)));
 
             f.deleteStatement(RdfToRyaConversions.convertStatement(statement2));
-            System.out.println("testDelete: AFTER LAST DELETION");
-
-            System.out.println("testDelete: DONE");
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("this is a new hat", EMPTY_CONSTRAINTS)));
             Assert.assertEquals(Sets.newHashSet(), getSet(f.queryText("Do you like my new hat?", EMPTY_CONSTRAINTS)));
         }


[04/11] incubator-rya git commit: RYA-401 Fixed all default charset bugs. Closes #243.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/periodic.notification/api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java b/extras/periodic.notification/api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java
index 50180ad..129bd6d 100644
--- a/extras/periodic.notification/api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java
+++ b/extras/periodic.notification/api/src/main/java/org/apache/rya/periodic/notification/serialization/BindingSetSerDe.java
@@ -19,6 +19,7 @@
 package org.apache.rya.periodic.notification.serialization;
 
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Map;
 
@@ -43,47 +44,47 @@ public class BindingSetSerDe implements Serializer<BindingSet>, Deserializer<Bin
 
     private static final Logger log = Logger.getLogger(BindingSetSerDe.class);
     private static final AccumuloPcjSerializer serializer =  new AccumuloPcjSerializer();
-    private static final byte[] DELIM_BYTE = "\u0002".getBytes();
-    
-    private byte[] toBytes(BindingSet bindingSet) {
+    private static final byte[] DELIM_BYTE = "\u0002".getBytes(StandardCharsets.UTF_8);
+
+    private byte[] toBytes(final BindingSet bindingSet) {
         try {
             return getBytes(getVarOrder(bindingSet), bindingSet);
-        } catch(Exception e) {
+        } catch(final Exception e) {
             log.trace("Unable to serialize BindingSet: " + bindingSet);
             return new byte[0];
         }
     }
 
-    private BindingSet fromBytes(byte[] bsBytes) {
+    private BindingSet fromBytes(final byte[] bsBytes) {
         try{
-        int firstIndex = Bytes.indexOf(bsBytes, DELIM_BYTE);
-        byte[] varOrderBytes = Arrays.copyOf(bsBytes, firstIndex);
-        byte[] bsBytesNoVarOrder = Arrays.copyOfRange(bsBytes, firstIndex + 1, bsBytes.length);
-        VariableOrder varOrder = new VariableOrder(new String(varOrderBytes,"UTF-8").split(";"));
-        return getBindingSet(varOrder, bsBytesNoVarOrder);
-        } catch(Exception e) {
+            final int firstIndex = Bytes.indexOf(bsBytes, DELIM_BYTE);
+            final byte[] varOrderBytes = Arrays.copyOf(bsBytes, firstIndex);
+            final byte[] bsBytesNoVarOrder = Arrays.copyOfRange(bsBytes, firstIndex + 1, bsBytes.length);
+            final VariableOrder varOrder = new VariableOrder(new String(varOrderBytes,"UTF-8").split(";"));
+            return getBindingSet(varOrder, bsBytesNoVarOrder);
+        } catch(final Exception e) {
             log.trace("Unable to deserialize BindingSet: " + bsBytes);
             return new QueryBindingSet();
         }
     }
-    
-    private VariableOrder getVarOrder(BindingSet bs) {
+
+    private VariableOrder getVarOrder(final BindingSet bs) {
         return new VariableOrder(bs.getBindingNames());
     }
-    
-    private byte[] getBytes(VariableOrder varOrder, BindingSet bs) throws UnsupportedEncodingException, BindingSetConversionException {
-        byte[] bsBytes = serializer.convert(bs, varOrder);
-        String varOrderString = Joiner.on(";").join(varOrder.getVariableOrders());
-        byte[] varOrderBytes = varOrderString.getBytes("UTF-8");
+
+    private byte[] getBytes(final VariableOrder varOrder, final BindingSet bs) throws UnsupportedEncodingException, BindingSetConversionException {
+        final byte[] bsBytes = serializer.convert(bs, varOrder);
+        final String varOrderString = Joiner.on(";").join(varOrder.getVariableOrders());
+        final byte[] varOrderBytes = varOrderString.getBytes("UTF-8");
         return Bytes.concat(varOrderBytes, DELIM_BYTE, bsBytes);
     }
-    
-    private BindingSet getBindingSet(VariableOrder varOrder, byte[] bsBytes) throws BindingSetConversionException {
+
+    private BindingSet getBindingSet(final VariableOrder varOrder, final byte[] bsBytes) throws BindingSetConversionException {
         return serializer.convert(bsBytes, varOrder);
     }
 
     @Override
-    public BindingSet deserialize(String topic, byte[] bytes) {
+    public BindingSet deserialize(final String topic, final byte[] bytes) {
         return fromBytes(bytes);
     }
 
@@ -93,12 +94,12 @@ public class BindingSetSerDe implements Serializer<BindingSet>, Deserializer<Bin
     }
 
     @Override
-    public void configure(Map<String, ?> arg0, boolean arg1) {
+    public void configure(final Map<String, ?> arg0, final boolean arg1) {
         // Do nothing.  Nothing to configure.
     }
 
     @Override
-    public byte[] serialize(String topic, BindingSet bs) {
+    public byte[] serialize(final String topic, final BindingSet bs) {
         return toBytes(bs);
     }
 
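The pattern behind RYA-401, shown in isolation (a generic sketch, not tied to
any one file): conversions that omit a charset use the JVM default, which
varies across platforms, so the two ends of a serialization boundary can
disagree about the bytes.

    import java.nio.charset.StandardCharsets;

    byte[] fragile  = "(a)&(b)".getBytes();                         // JVM-default charset
    byte[] portable = "(a)&(b)".getBytes(StandardCharsets.UTF_8);   // same bytes on every JVM
    String decoded  = new String(portable, StandardCharsets.UTF_8); // symmetric decode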

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.export/export.accumulo/src/main/java/org/apache/rya/export/accumulo/util/AccumuloRyaUtils.java
----------------------------------------------------------------------
diff --git a/extras/rya.export/export.accumulo/src/main/java/org/apache/rya/export/accumulo/util/AccumuloRyaUtils.java b/extras/rya.export/export.accumulo/src/main/java/org/apache/rya/export/accumulo/util/AccumuloRyaUtils.java
index 115074c..1073b6e 100644
--- a/extras/rya.export/export.accumulo/src/main/java/org/apache/rya/export/accumulo/util/AccumuloRyaUtils.java
+++ b/extras/rya.export/export.accumulo/src/main/java/org/apache/rya/export/accumulo/util/AccumuloRyaUtils.java
@@ -19,6 +19,7 @@
 package org.apache.rya.export.accumulo.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
@@ -336,7 +337,7 @@ public final class AccumuloRyaUtils {
     public static Authorizations addUserAuths(final String user, final SecurityOperations secOps, final Authorizations auths) throws AccumuloException, AccumuloSecurityException {
         final List<String> authList = new ArrayList<>();
         for (final byte[] authBytes : auths.getAuthorizations()) {
-            final String auth = new String(authBytes);
+            final String auth = new String(authBytes, StandardCharsets.UTF_8);
             authList.add(auth);
         }
         return addUserAuths(user, secOps, authList.toArray(new String[0]));
@@ -358,7 +359,7 @@ public final class AccumuloRyaUtils {
             authList.add(currentAuth);
         }
         for (final String newAuth : auths) {
-            authList.add(newAuth.getBytes());
+            authList.add(newAuth.getBytes(StandardCharsets.UTF_8));
         }
         final Authorizations result = new Authorizations(authList);
         return result;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.export/export.client/src/main/java/org/apache/rya/export/client/merge/VisibilityStatementMerger.java
----------------------------------------------------------------------
diff --git a/extras/rya.export/export.client/src/main/java/org/apache/rya/export/client/merge/VisibilityStatementMerger.java b/extras/rya.export/export.client/src/main/java/org/apache/rya/export/client/merge/VisibilityStatementMerger.java
index f3d523f..fd1bc4d 100644
--- a/extras/rya.export/export.client/src/main/java/org/apache/rya/export/client/merge/VisibilityStatementMerger.java
+++ b/extras/rya.export/export.client/src/main/java/org/apache/rya/export/client/merge/VisibilityStatementMerger.java
@@ -18,6 +18,8 @@
  */
 package org.apache.rya.export.client.merge;
 
+import java.nio.charset.StandardCharsets;
+
 import org.apache.rya.api.domain.RyaStatement;
 import org.apache.rya.export.api.MergerException;
 import org.apache.rya.export.api.StatementMerger;
@@ -41,8 +43,8 @@ public class VisibilityStatementMerger implements StatementMerger {
             final RyaStatement parentStatement = parent.get();
             if(child.isPresent()) {
                 final RyaStatement childStatement = child.get();
-                final String pVis = new String(parentStatement.getColumnVisibility());
-                final String cVis = new String(childStatement.getColumnVisibility());
+                final String pVis = new String(parentStatement.getColumnVisibility(), StandardCharsets.UTF_8);
+                final String cVis = new String(childStatement.getColumnVisibility(), StandardCharsets.UTF_8);
                 String visibility = "";
                 final Joiner join = Joiner.on(")&(");
                 if(pVis.isEmpty() || cVis.isEmpty()) {
@@ -50,7 +52,7 @@ public class VisibilityStatementMerger implements StatementMerger {
                 } else {
                     visibility = "(" + join.join(pVis, cVis) + ")";
                 }
-                parentStatement.setColumnVisibility(visibility.getBytes());
+                parentStatement.setColumnVisibility(visibility.getBytes(StandardCharsets.UTF_8));
                 return Optional.of(parentStatement);
             }
             return parent;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
----------------------------------------------------------------------
diff --git a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
index 4597400..59b92ba 100644
--- a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
+++ b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/CopyTool.java
@@ -22,6 +22,7 @@ import java.io.BufferedOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.text.ParseException;
 import java.util.ArrayList;
@@ -81,9 +82,6 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.Logger;
 import org.apache.log4j.xml.DOMConfigurator;
-
-import com.google.common.base.Joiner;
-
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.mr.AccumuloHDFSFileInputFormat;
 import org.apache.rya.accumulo.mr.MRUtils;
@@ -105,6 +103,8 @@ import org.apache.rya.api.RdfCloudTripleStoreUtils;
 import org.apache.rya.api.layout.TablePrefixLayoutStrategy;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
 
+import com.google.common.base.Joiner;
+
 /**
  * Handles copying data from a parent instance into a child instance.
  */
@@ -589,9 +589,9 @@ public class CopyTool extends AbstractDualInstanceAccumuloMRTool {
         final Path splitsPath = getPath(baseOutputDir, childTableName, "splits.txt");
         final Collection<Text> splits = parentTableOperations.listSplits(parentTableName, 100);
         log.info("Creating splits file at: " + splitsPath);
-        try (PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(splitsPath)))) {
+        try (PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(splitsPath)), false, StandardCharsets.UTF_8.name())) {
             for (final Text split : splits) {
-                final String encoded = new String(Base64.encodeBase64(TextUtil.getBytes(split)));
+                final String encoded = new String(Base64.encodeBase64(TextUtil.getBytes(split)), StandardCharsets.UTF_8);
                 out.println(encoded);
             }
         }
@@ -873,12 +873,7 @@ public class CopyTool extends AbstractDualInstanceAccumuloMRTool {
         }
         log.info("Starting Copy Tool");
 
-        Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
-            @Override
-            public void uncaughtException(final Thread thread, final Throwable throwable) {
-                log.error("Uncaught exception in " + thread.getName(), throwable);
-            }
-        });
+        Thread.setDefaultUncaughtExceptionHandler((thread, throwable) -> log.error("Uncaught exception in " + thread.getName(), throwable));
 
         final CopyTool copyTool = new CopyTool();
         final int returnCode = copyTool.setupAndRun(args);
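
Two of the changes above are worth seeing together outside the MapReduce plumbing: the PrintStream now names its charset (the two-argument constructor silently uses the platform default), and the Base64 output is decoded explicitly. A minimal sketch against the local filesystem (the FileOutputStream stands in for fs.create; names are hypothetical):

    import java.io.BufferedOutputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;
    import java.nio.charset.StandardCharsets;
    import java.util.List;

    import org.apache.commons.codec.binary.Base64;

    public class SplitsFileSketch {
        /** Writes one Base64-encoded split per line; the stream's charset is pinned to UTF-8. */
        static void writeSplits(final String path, final List<byte[]> splits) throws IOException {
            try (PrintStream out = new PrintStream(
                    new BufferedOutputStream(new FileOutputStream(path)), false, StandardCharsets.UTF_8.name())) {
                for (final byte[] split : splits) {
                    // Base64 output is pure ASCII, so the UTF-8 decode is lossless.
                    out.println(new String(Base64.encodeBase64(split), StandardCharsets.UTF_8));
                }
            }
        }
    }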

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/AccumuloRyaUtils.java
----------------------------------------------------------------------
diff --git a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/AccumuloRyaUtils.java b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/AccumuloRyaUtils.java
index e702e64..5a3b928 100644
--- a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/AccumuloRyaUtils.java
+++ b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/AccumuloRyaUtils.java
@@ -19,6 +19,7 @@
 package org.apache.rya.accumulo.mr.merge.util;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -49,13 +50,6 @@ import org.apache.accumulo.core.security.Authorizations;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.Logger;
-import org.openrdf.model.Literal;
-import org.openrdf.model.ValueFactory;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableSet;
-
-import info.aduna.iteration.CloseableIteration;
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 import org.apache.rya.accumulo.AccumuloRyaDAO;
 import org.apache.rya.accumulo.mr.MRUtils;
@@ -66,6 +60,13 @@ import org.apache.rya.api.domain.RyaURI;
 import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.api.resolver.RdfToRyaConversions;
 import org.apache.rya.indexing.accumulo.ConfigUtils;
+import org.openrdf.model.Literal;
+import org.openrdf.model.ValueFactory;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableSet;
+
+import info.aduna.iteration.CloseableIteration;
 
 /**
  * Utility methods for an Accumulo Rya instance.
@@ -515,7 +516,7 @@ public final class AccumuloRyaUtils {
     public static Authorizations addUserAuths(final String user, final SecurityOperations secOps, final Authorizations auths) throws AccumuloException, AccumuloSecurityException {
         final List<String> authList = new ArrayList<>();
         for (final byte[] authBytes : auths.getAuthorizations()) {
-            final String auth = new String(authBytes);
+            final String auth = new String(authBytes, StandardCharsets.UTF_8);
             authList.add(auth);
         }
         return addUserAuths(user, secOps, authList.toArray(new String[0]));
@@ -537,7 +538,7 @@ public final class AccumuloRyaUtils {
             authList.add(currentAuth);
         }
         for (final String newAuth : auths) {
-            authList.add(newAuth.getBytes());
+            authList.add(newAuth.getBytes(StandardCharsets.UTF_8));
         }
         final Authorizations result = new Authorizations(authList);
         return result;
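
The same round-trip discipline appears here: authorization bytes are decoded with UTF-8 on the way out, and new auth strings are encoded with UTF-8 on the way in. A condensed sketch of the append step (hypothetical class name; accumulo-core is the only dependency):

    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;

    import org.apache.accumulo.core.security.Authorizations;

    public class AuthAppendSketch {
        /** Returns a new Authorizations containing the existing auths plus the given strings. */
        static Authorizations append(final Authorizations existing, final String... newAuths) {
            final List<byte[]> authList = new ArrayList<>(existing.getAuthorizations());
            for (final String newAuth : newAuths) {
                authList.add(newAuth.getBytes(StandardCharsets.UTF_8));
            }
            return new Authorizations(authList);
        }
    }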

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/QueryRuleset.java
----------------------------------------------------------------------
diff --git a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/QueryRuleset.java b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/QueryRuleset.java
index 9627c54..42109db 100644
--- a/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/QueryRuleset.java
+++ b/extras/rya.merger/src/main/java/org/apache/rya/accumulo/mr/merge/util/QueryRuleset.java
@@ -18,15 +18,32 @@
  */
 package org.apache.rya.accumulo.mr.merge.util;
 
-import java.io.BufferedReader;
-import java.io.FileReader;
+import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
+import org.apache.rya.accumulo.mr.merge.CopyTool;
+import org.apache.rya.accumulo.mr.merge.util.QueryRuleset.QueryRulesetException;
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
+import org.apache.rya.rdftriplestore.inference.InferJoin;
+import org.apache.rya.rdftriplestore.inference.InferUnion;
+import org.apache.rya.rdftriplestore.inference.InferenceEngine;
+import org.apache.rya.rdftriplestore.inference.InverseOfVisitor;
+import org.apache.rya.rdftriplestore.inference.SameAsVisitor;
+import org.apache.rya.rdftriplestore.inference.SubClassOfVisitor;
+import org.apache.rya.rdftriplestore.inference.SubPropertyOfVisitor;
+import org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
+import org.apache.rya.rdftriplestore.inference.TransitivePropertyVisitor;
+import org.apache.rya.rdftriplestore.utils.FixedStatementPattern;
+import org.apache.rya.rdftriplestore.utils.TransitivePropertySP;
+import org.apache.rya.sail.config.RyaSailFactory;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;
@@ -52,22 +69,6 @@ import org.openrdf.query.parser.ParsedTupleQuery;
 import org.openrdf.query.parser.QueryParserUtil;
 import org.openrdf.sail.SailException;
 
-import org.apache.rya.accumulo.mr.merge.CopyTool;
-import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.rya.rdftriplestore.RdfCloudTripleStore;
-import org.apache.rya.rdftriplestore.inference.InferJoin;
-import org.apache.rya.rdftriplestore.inference.InferUnion;
-import org.apache.rya.rdftriplestore.inference.InferenceEngine;
-import org.apache.rya.rdftriplestore.inference.InverseOfVisitor;
-import org.apache.rya.rdftriplestore.inference.SameAsVisitor;
-import org.apache.rya.rdftriplestore.inference.SubClassOfVisitor;
-import org.apache.rya.rdftriplestore.inference.SubPropertyOfVisitor;
-import org.apache.rya.rdftriplestore.inference.SymmetricPropertyVisitor;
-import org.apache.rya.rdftriplestore.inference.TransitivePropertyVisitor;
-import org.apache.rya.rdftriplestore.utils.FixedStatementPattern;
-import org.apache.rya.rdftriplestore.utils.TransitivePropertySP;
-import org.apache.rya.sail.config.RyaSailFactory;
-
 /**
  * Represents a set of {@link CopyRule} instances derived from a query. The ruleset determines a logical
  * subset of statements in Rya, such that statements selected by the ruleset are at least enough to answer
@@ -432,16 +433,7 @@ public class QueryRuleset {
         final String queryFile = conf.get(CopyTool.QUERY_FILE_PROP);
         if (query == null && queryFile != null) {
             try {
-                final FileReader fileReader = new FileReader(queryFile);
-                final BufferedReader reader = new BufferedReader(fileReader);
-                final StringBuilder builder = new StringBuilder();
-                String line = reader.readLine();
-                while (line != null) {
-                    builder.append(line).append("\n");
-                    line = reader.readLine();
-                }
-                query = builder.toString();
-                reader.close();
+                query = FileUtils.readFileToString(new File(queryFile), StandardCharsets.UTF_8);
                 conf.set(CopyTool.QUERY_STRING_PROP, query);
             }
             catch (final IOException e) {
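
The replacement of the hand-rolled reader loop is the notable simplification here: commons-io's readFileToString both shortens the code and forces a charset decision, where the old FileReader could only use the platform default. One subtle behavioral difference: the old loop normalized every line ending to "\n", while readFileToString preserves the file's own endings. Roughly:

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.io.FileUtils;

    public class QueryFileSketch {
        static String readQuery(final String queryFile) throws IOException {
            // One call replaces FileReader + BufferedReader + StringBuilder,
            // and unlike FileReader it cannot fall back to the platform charset.
            return FileUtils.readFileToString(new File(queryFile), StandardCharsets.UTF_8);
        }
    }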

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
index 4070849..3fea6ed 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.demo/src/main/java/org/apache/rya/indexing/pcj/fluo/demo/FluoAndHistoricPcjsDemo.java
@@ -18,6 +18,7 @@
  */
 package org.apache.rya.indexing.pcj.fluo.demo;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 
 import org.apache.accumulo.core.client.Connector;
@@ -96,7 +97,7 @@ public class FluoAndHistoricPcjsDemo implements Demo {
     /**
      * Used to pause the demo waiting for the presenter to hit the Enter key.
      */
-    private final java.util.Scanner keyboard = new java.util.Scanner(System.in);
+    private final java.util.Scanner keyboard = new java.util.Scanner(System.in, StandardCharsets.UTF_8.name());
 
     @Override
     public void execute(
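
Even the demo's Scanner gets the treatment, since Scanner(InputStream) otherwise decodes stdin with the platform default. In isolation, as a hypothetical helper in the spirit of the demo's pause:

    import java.nio.charset.StandardCharsets;
    import java.util.Scanner;

    public class PromptSketch {
        private static final Scanner KEYBOARD = new Scanner(System.in, StandardCharsets.UTF_8.name());

        /** Blocks until the presenter hits Enter; console bytes are decoded as UTF-8. */
        static void waitForEnter() {
            System.out.println("Press Enter to continue...");
            KEYBOARD.nextLine();
        }
    }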

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/JoinSelectProspectOutput.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/JoinSelectProspectOutput.java b/extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/JoinSelectProspectOutput.java
index 105e852..f630df0 100644
--- a/extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/JoinSelectProspectOutput.java
+++ b/extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/JoinSelectProspectOutput.java
@@ -1,5 +1,3 @@
-package org.apache.rya.joinselect.mr;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.joinselect.mr;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,21 +16,16 @@ package org.apache.rya.joinselect.mr;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.joinselect.mr;
 
 import static org.apache.rya.joinselect.mr.utils.JoinSelectConstants.AUTHS;
 import static org.apache.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_OUTPUTPATH;
 import static org.apache.rya.joinselect.mr.utils.JoinSelectConstants.PROSPECTS_TABLE;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.regex.Pattern;
 
-import org.apache.rya.joinselect.mr.utils.CardinalityType;
-import org.apache.rya.joinselect.mr.utils.CompositeType;
-import org.apache.rya.joinselect.mr.utils.JoinSelectStatsUtil;
-import org.apache.rya.joinselect.mr.utils.TripleCard;
-
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
@@ -45,6 +38,10 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.Tool;
+import org.apache.rya.joinselect.mr.utils.CardinalityType;
+import org.apache.rya.joinselect.mr.utils.CompositeType;
+import org.apache.rya.joinselect.mr.utils.JoinSelectStatsUtil;
+import org.apache.rya.joinselect.mr.utils.TripleCard;
 
 public class JoinSelectProspectOutput extends Configured implements Tool {
 
@@ -55,21 +52,22 @@ public class JoinSelectProspectOutput extends Configured implements Tool {
     Text inText = new Text();
     Pattern splitPattern = Pattern.compile(DELIM);
 
-    public void map(Key key, Value data, Context context) throws IOException, InterruptedException {
+    @Override
+    public void map(final Key key, final Value data, final Context context) throws IOException, InterruptedException {
 
       key.getRow(inText);
-      String[] cardData = splitPattern.split(inText.toString().trim(), 4);
+      final String[] cardData = splitPattern.split(inText.toString().trim(), 4);
       // System.out.println("Card data is " + cardData[0] + ", "+ cardData[1] + ", "+ cardData[2]);
       if (cardData.length == 3 && ((cardData[0].equals("subject")) || (cardData[0].equals("object")) || (cardData[0].equals("predicate")))) {
-        Text tripleValType = new Text(cardData[0]);
-        Text cardKey = new Text(cardData[1]);
-        LongWritable ts = new LongWritable(Long.valueOf(cardData[2]));
+        final Text tripleValType = new Text(cardData[0]);
+        final Text cardKey = new Text(cardData[1]);
+        final LongWritable ts = new LongWritable(Long.valueOf(cardData[2]));
 
-        String s = new String(data.get());
-        LongWritable card = new LongWritable(Long.parseLong(s));
+        final String s = new String(data.get(), StandardCharsets.UTF_8);
+        final LongWritable card = new LongWritable(Long.parseLong(s));
 
-        CompositeType cType = new CompositeType(cardKey, new IntWritable(1));
-        TripleCard tCard = new TripleCard(new CardinalityType(card, tripleValType, ts));
+        final CompositeType cType = new CompositeType(cardKey, new IntWritable(1));
+        final TripleCard tCard = new TripleCard(new CardinalityType(card, tripleValType, ts));
 
         context.write(new CompositeType(cardKey, new IntWritable(1)), new TripleCard(new CardinalityType(card, tripleValType, ts)));
         // System.out.println("Card mapper output key is " + cType + " and value is " + tCard );
@@ -77,15 +75,15 @@ public class JoinSelectProspectOutput extends Configured implements Tool {
       } else if (cardData.length == 4
           && ((cardData[0].equals("subjectpredicate")) || (cardData[0].equals("subjectobject")) || (cardData[0].equals("predicateobject")))) {
 
-        Text tripleValType = new Text(cardData[0]);
-        Text cardKey = new Text(cardData[1] + DELIM + cardData[2]);
-        LongWritable ts = new LongWritable(Long.valueOf(cardData[3]));
+        final Text tripleValType = new Text(cardData[0]);
+        final Text cardKey = new Text(cardData[1] + DELIM + cardData[2]);
+        final LongWritable ts = new LongWritable(Long.valueOf(cardData[3]));
 
-        String s = new String(data.get());
-        LongWritable card = new LongWritable(Long.parseLong(s));
+        final String s = new String(data.get(), StandardCharsets.UTF_8);
+        final LongWritable card = new LongWritable(Long.parseLong(s));
 
-        CompositeType cType = new CompositeType(cardKey, new IntWritable(1));
-        TripleCard tCard = new TripleCard(new CardinalityType(card, tripleValType, ts));
+        final CompositeType cType = new CompositeType(cardKey, new IntWritable(1));
+        final TripleCard tCard = new TripleCard(new CardinalityType(card, tripleValType, ts));
 
         context.write(new CompositeType(cardKey, new IntWritable(1)), new TripleCard(new CardinalityType(card, tripleValType, ts)));
         // System.out.println("Card mapper output key is " + cType + " and value is " + tCard );
@@ -97,16 +95,16 @@ public class JoinSelectProspectOutput extends Configured implements Tool {
   }
 
   @Override
-  public int run(String[] args) throws AccumuloSecurityException, IOException, ClassNotFoundException, InterruptedException {
+  public int run(final String[] args) throws AccumuloSecurityException, IOException, ClassNotFoundException, InterruptedException {
 
-    Configuration conf = getConf();
-    String inTable = conf.get(PROSPECTS_TABLE);
-    String auths = conf.get(AUTHS);
-    String outPath = conf.get(PROSPECTS_OUTPUTPATH);
+    final Configuration conf = getConf();
+    final String inTable = conf.get(PROSPECTS_TABLE);
+    final String auths = conf.get(AUTHS);
+    final String outPath = conf.get(PROSPECTS_OUTPUTPATH);
 
     assert inTable != null && outPath != null;
 
-    Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
+    final Job job = new Job(conf, this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
     job.setJarByClass(this.getClass());
     conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
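
The substance of this change is the two new String(data.get(), ...) lines: the prospects table stores counts as decimal strings inside Accumulo Values, and the mapper must decode them with a known charset before parsing. Reduced to its essence (hypothetical names):

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.data.Value;
    import org.apache.hadoop.io.LongWritable;

    public class CardinalityDecodeSketch {
        /** Parses a count that was stored in a Value as a decimal string. */
        static LongWritable decodeCount(final Value data) {
            final String s = new String(data.get(), StandardCharsets.UTF_8);
            return new LongWritable(Long.parseLong(s));
        }
    }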
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.prospector/src/main/java/org/apache/rya/prospector/plans/impl/CountPlan.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/org/apache/rya/prospector/plans/impl/CountPlan.java b/extras/rya.prospector/src/main/java/org/apache/rya/prospector/plans/impl/CountPlan.java
index ebcf6c3..f408b7d 100644
--- a/extras/rya.prospector/src/main/java/org/apache/rya/prospector/plans/impl/CountPlan.java
+++ b/extras/rya.prospector/src/main/java/org/apache/rya/prospector/plans/impl/CountPlan.java
@@ -21,6 +21,7 @@ package org.apache.rya.prospector.plans.impl;
 import static org.apache.rya.prospector.utils.ProspectorConstants.COUNT;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -62,7 +63,7 @@ import org.openrdf.model.vocabulary.XMLSchema;
 public class CountPlan implements IndexWorkPlan {
 
     @Override
-    public Collection<Map.Entry<IntermediateProspect, LongWritable>> map(RyaStatement ryaStatement) {
+    public Collection<Map.Entry<IntermediateProspect, LongWritable>> map(final RyaStatement ryaStatement) {
         final RyaURI subject = ryaStatement.getSubject();
         final RyaURI predicate = ryaStatement.getPredicate();
         final String subjpred = ryaStatement.getSubject().getData() + DELIM + ryaStatement.getPredicate().getData();
@@ -71,7 +72,7 @@ public class CountPlan implements IndexWorkPlan {
         final RyaType object = ryaStatement.getObject();
         final int localIndex = URIUtil.getLocalNameIndex(subject.getData());
         final String namespace = subject.getData().substring(0, localIndex - 1);
-        final String visibility = new String(ryaStatement.getColumnVisibility());
+        final String visibility = new String(ryaStatement.getColumnVisibility(), StandardCharsets.UTF_8);
 
         final List<Map.Entry<IntermediateProspect, LongWritable>> entries = new ArrayList<>(7);
 
@@ -149,7 +150,7 @@ public class CountPlan implements IndexWorkPlan {
     }
 
     @Override
-    public Collection<Map.Entry<IntermediateProspect, LongWritable>> combine(IntermediateProspect prospect, Iterable<LongWritable> counts) {
+    public Collection<Map.Entry<IntermediateProspect, LongWritable>> combine(final IntermediateProspect prospect, final Iterable<LongWritable> counts) {
         long sum = 0;
         for(final LongWritable count : counts) {
             sum += count.get();
@@ -158,7 +159,7 @@ public class CountPlan implements IndexWorkPlan {
     }
 
     @Override
-    public void reduce(IntermediateProspect prospect, Iterable<LongWritable> counts, Date timestamp, Reducer.Context context) throws IOException, InterruptedException {
+    public void reduce(final IntermediateProspect prospect, final Iterable<LongWritable> counts, final Date timestamp, final Reducer.Context context) throws IOException, InterruptedException {
         long sum = 0;
         for(final LongWritable count : counts) {
             sum += count.get();
@@ -172,7 +173,7 @@ public class CountPlan implements IndexWorkPlan {
 
             final String dataType = prospect.getDataType();
             final ColumnVisibility visibility = new ColumnVisibility(prospect.getVisibility());
-            final Value sumValue = new Value(("" + sum).getBytes());
+            final Value sumValue = new Value(("" + sum).getBytes(StandardCharsets.UTF_8));
             m.put(COUNT, prospect.getDataType(), visibility, timestamp.getTime(), sumValue);
 
             context.write(null, m);
@@ -185,7 +186,7 @@ public class CountPlan implements IndexWorkPlan {
     }
 
     @Override
-    public String getCompositeValue(List<String> indices){
+    public String getCompositeValue(final List<String> indices){
         final Iterator<String> indexIt = indices.iterator();
         String compositeIndex = indexIt.next();
         while (indexIt.hasNext()){
@@ -196,7 +197,7 @@ public class CountPlan implements IndexWorkPlan {
     }
 
     @Override
-    public List<IndexEntry> query(Connector connector, String tableName, List<Long> prospectTimes, String type, String compositeIndex, String dataType, String[] auths) throws TableNotFoundException {
+    public List<IndexEntry> query(final Connector connector, final String tableName, final List<Long> prospectTimes, final String type, final String compositeIndex, final String dataType, final String[] auths) throws TableNotFoundException {
         assert connector != null && tableName != null && type != null && compositeIndex != null;
 
         final BatchScanner bs = connector.createBatchScanner(tableName, new Authorizations(auths), 4);
@@ -242,7 +243,7 @@ public class CountPlan implements IndexWorkPlan {
             // Create an entry using the values that were found.
             final String entryDataType = k.getColumnQualifier().toString();
             final String entryVisibility = k.getColumnVisibility().toString();
-            final Long entryCount = Long.parseLong(new String(v.get()));
+            final Long entryCount = Long.parseLong(new String(v.get(), StandardCharsets.UTF_8));
 
             indexEntries.add(
                     IndexEntry.builder()
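
CountPlan shows both directions of the same fix: the reducer serializes the summed count into a Value with an explicit charset, and the query side parses it back the same way. The write side, roughly (hypothetical names; this assumes the COUNT constant resolves to the literal "count"):

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.data.Mutation;
    import org.apache.accumulo.core.data.Value;
    import org.apache.accumulo.core.security.ColumnVisibility;

    public class CountMutationSketch {
        /** Builds the count mutation: row = composite index, CF = "count", CQ = data type. */
        static Mutation countMutation(final String index, final String dataType,
                final String visibility, final long timestamp, final long sum) {
            final Mutation m = new Mutation(index);
            m.put("count", dataType, new ColumnVisibility(visibility), timestamp,
                    new Value(Long.toString(sum).getBytes(StandardCharsets.UTF_8)));
            return m;
        }
    }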

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.prospector/src/main/java/org/apache/rya/prospector/utils/ProspectorUtils.java
----------------------------------------------------------------------
diff --git a/extras/rya.prospector/src/main/java/org/apache/rya/prospector/utils/ProspectorUtils.java b/extras/rya.prospector/src/main/java/org/apache/rya/prospector/utils/ProspectorUtils.java
index 4dc9253..d75730b 100644
--- a/extras/rya.prospector/src/main/java/org/apache/rya/prospector/utils/ProspectorUtils.java
+++ b/extras/rya.prospector/src/main/java/org/apache/rya/prospector/utils/ProspectorUtils.java
@@ -24,6 +24,7 @@ import static org.apache.rya.prospector.utils.ProspectorConstants.PASSWORD;
 import static org.apache.rya.prospector.utils.ProspectorConstants.USERNAME;
 import static org.apache.rya.prospector.utils.ProspectorConstants.ZOOKEEPERS;
 
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Date;
@@ -56,7 +57,7 @@ public class ProspectorUtils {
     public static final long INDEXED_DATE_SORT_VAL = 999999999999999999L; // 18 char long, same length as date format pattern below
     public static final String INDEXED_DATE_FORMAT = "yyyyMMddHHmmsssSSS";
 
-    public static String getReverseIndexDateTime(Date date) {
+    public static String getReverseIndexDateTime(final Date date) {
         Validate.notNull(date);
         final String formattedDateString = new SimpleDateFormat(INDEXED_DATE_FORMAT).format(date);
         final long diff = INDEXED_DATE_SORT_VAL - Long.valueOf(formattedDateString);
@@ -64,7 +65,7 @@ public class ProspectorUtils {
         return Long.toString(diff);
     }
 
-    public static Map<String, IndexWorkPlan> planMap(Collection<IndexWorkPlan> plans) {
+    public static Map<String, IndexWorkPlan> planMap(final Collection<IndexWorkPlan> plans) {
         final Map<String, IndexWorkPlan> planMap = new HashMap<>();
         for(final IndexWorkPlan plan : plans) {
             planMap.put(plan.getIndexType(), plan);
@@ -72,7 +73,7 @@ public class ProspectorUtils {
         return planMap;
     }
 
-    public static void initMRJob(Job job, String table, String outtable, String[] auths) throws AccumuloSecurityException {
+    public static void initMRJob(final Job job, final String table, final String outtable, final String[] auths) throws AccumuloSecurityException {
         final Configuration conf = job.getConfiguration();
         final String username = conf.get(USERNAME);
         final String password = conf.get(PASSWORD);
@@ -91,7 +92,7 @@ public class ProspectorUtils {
             throw new IllegalArgumentException("Must specify either mock or zookeepers");
         }
 
-        AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes()));
+        AccumuloInputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes(StandardCharsets.UTF_8)));
         AccumuloInputFormat.setInputTableName(job, table);
         job.setInputFormatClass(AccumuloInputFormat.class);
         AccumuloInputFormat.setScanAuthorizations(job, new Authorizations(auths));
@@ -100,11 +101,11 @@ public class ProspectorUtils {
         job.setOutputFormatClass(AccumuloOutputFormat.class);
         job.setOutputKeyClass(Text.class);
         job.setOutputValueClass(Mutation.class);
-        AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes()));
+        AccumuloOutputFormat.setConnectorInfo(job, username, new PasswordToken(password.getBytes(StandardCharsets.UTF_8)));
         AccumuloOutputFormat.setDefaultTableName(job, outtable);
     }
 
-    public static void addMRPerformance(Configuration conf) {
+    public static void addMRPerformance(final Configuration conf) {
         conf.setBoolean("mapred.map.tasks.speculative.execution", false);
         conf.setBoolean("mapred.reduce.tasks.speculative.execution", false);
         conf.set("io.sort.mb", "256");
@@ -112,7 +113,7 @@ public class ProspectorUtils {
         conf.set("mapred.map.output.compression.codec", GzipCodec.class.getName());
     }
 
-    public static Instance instance(Configuration conf) {
+    public static Instance instance(final Configuration conf) {
         assert conf != null;
 
         final String instance_str = conf.get(INSTANCE);
@@ -127,7 +128,7 @@ public class ProspectorUtils {
         }
     }
 
-    public static Connector connector(Instance instance, Configuration conf) throws AccumuloException, AccumuloSecurityException {
+    public static Connector connector(Instance instance, final Configuration conf) throws AccumuloException, AccumuloSecurityException {
         final String username = conf.get(USERNAME);
         final String password = conf.get(PASSWORD);
         if (instance == null) {
@@ -136,7 +137,7 @@ public class ProspectorUtils {
         return instance.getConnector(username, new PasswordToken(password));
     }
 
-    public static void writeMutations(Connector connector, String tableName, Collection<Mutation> mutations) throws TableNotFoundException, MutationsRejectedException {
+    public static void writeMutations(final Connector connector, final String tableName, final Collection<Mutation> mutations) throws TableNotFoundException, MutationsRejectedException {
         final BatchWriter bw = connector.createBatchWriter(tableName, 10000l, 10000l, 4);
         for(final Mutation mutation : mutations) {
             bw.addMutation(mutation);
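
The PasswordToken lines are the easy ones to miss: password.getBytes() with no argument is a default-charset call like any other. A sketch of the two equivalent options; note that PasswordToken also has a CharSequence constructor that performs its own UTF-8 encoding:

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.client.security.tokens.PasswordToken;

    public class TokenSketch {
        static PasswordToken fromBytes(final String password) {
            // Mirrors the fixed calls above: encode explicitly, never via the default charset.
            return new PasswordToken(password.getBytes(StandardCharsets.UTF_8));
        }

        static PasswordToken fromChars(final String password) {
            // Alternative: let PasswordToken(CharSequence) do the UTF-8 encoding itself.
            return new PasswordToken(password);
        }
    }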

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/rya.reasoning/src/main/java/org/apache/rya/reasoning/mr/ConformanceTest.java
----------------------------------------------------------------------
diff --git a/extras/rya.reasoning/src/main/java/org/apache/rya/reasoning/mr/ConformanceTest.java b/extras/rya.reasoning/src/main/java/org/apache/rya/reasoning/mr/ConformanceTest.java
index bb78373..5515a5b 100644
--- a/extras/rya.reasoning/src/main/java/org/apache/rya/reasoning/mr/ConformanceTest.java
+++ b/extras/rya.reasoning/src/main/java/org/apache/rya/reasoning/mr/ConformanceTest.java
@@ -25,6 +25,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.StringReader;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -33,10 +34,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.rya.accumulo.mr.MRUtils;
-import org.apache.rya.reasoning.Fact;
-import org.apache.rya.reasoning.Schema;
-
 import org.apache.accumulo.minicluster.MiniAccumuloCluster;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -45,13 +42,16 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.rya.accumulo.mr.MRUtils;
+import org.apache.rya.reasoning.Fact;
+import org.apache.rya.reasoning.Schema;
 import org.openrdf.OpenRDFException;
 import org.openrdf.model.Resource;
 import org.openrdf.model.Statement;
 import org.openrdf.model.URI;
 import org.openrdf.model.Value;
-import org.openrdf.model.vocabulary.RDF;
 import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.QueryLanguage;
 import org.openrdf.query.TupleQuery;
@@ -98,7 +98,7 @@ public class ConformanceTest extends Configured implements Tool {
         Set<Statement> inferred = new HashSet<>();
         Set<Statement> error = new HashSet<>();
         @Override
-        public void handleStatement(Statement st) {
+        public void handleStatement(final Statement st) {
             if (types.contains(TEST_ENTAILMENT)) {
                 expected.add(st);
             }
@@ -107,7 +107,7 @@ public class ConformanceTest extends Configured implements Tool {
             }
         }
         String type() {
-            StringBuilder sb = new StringBuilder();
+            final StringBuilder sb = new StringBuilder();
             if (types.contains(TEST_CONSISTENCY)) {
                 sb.append("{Consistency}");
             }
@@ -127,17 +127,17 @@ public class ConformanceTest extends Configured implements Tool {
     private static class OutputCollector extends RDFHandlerBase {
         Set<Statement> triples = new HashSet<>();
         @Override
-        public void handleStatement(Statement st) {
+        public void handleStatement(final Statement st) {
             triples.add(st);
         }
     }
 
-    public static void main(String[] args) throws Exception {
+    public static void main(final String[] args) throws Exception {
         ToolRunner.run(new ConformanceTest(), args);
     }
 
     @Override
-    public int run(String[] args) throws Exception {
+    public int run(final String[] args) throws Exception {
         // Validate command
         if (args.length < 1 || args.length > 2) {
             System.out.println("Usage:\n");
@@ -155,11 +155,11 @@ public class ConformanceTest extends Configured implements Tool {
             System.exit(1);
         }
 
-        Set<Value> conformanceTestURIs = new HashSet<>();
+        final Set<Value> conformanceTestURIs = new HashSet<>();
         Collection<OwlTest> conformanceTests = new LinkedList<>();
-        List<OwlTest> successes = new LinkedList<>();
-        List<OwlTest> failures = new LinkedList<>();
-        Configuration conf = getConf();
+        final List<OwlTest> successes = new LinkedList<>();
+        final List<OwlTest> failures = new LinkedList<>();
+        final Configuration conf = getConf();
         Repository repo;
         File workingDir;
 
@@ -167,13 +167,13 @@ public class ConformanceTest extends Configured implements Tool {
         if (args.length == 2) {
             workingDir = new File(args[1]);
             RDFFormat inputFormat= RDFFormat.RDFXML;
-            String formatString = conf.get(MRUtils.FORMAT_PROP);
+            final String formatString = conf.get(MRUtils.FORMAT_PROP);
             if (formatString != null) {
                 inputFormat = RDFFormat.valueOf(formatString);
             }
             repo = new SailRepository(new MemoryStore());
             repo.initialize();
-            RepositoryConnection conn = repo.getConnection();
+            final RepositoryConnection conn = repo.getConnection();
             conn.add(new FileInputStream(args[0]), "", inputFormat);
             conn.close();
         }
@@ -185,7 +185,7 @@ public class ConformanceTest extends Configured implements Tool {
         }
 
         // Query for the tests we're interested in
-        RepositoryConnection conn = repo.getConnection();
+        final RepositoryConnection conn = repo.getConnection();
         conformanceTestURIs.addAll(getTestURIs(conn, TEST_INCONSISTENCY));
         conformanceTestURIs.addAll(getTestURIs(conn, TEST_CONSISTENCY));
         conformanceTestURIs.addAll(getTestURIs(conn, TEST_ENTAILMENT));
@@ -195,9 +195,9 @@ public class ConformanceTest extends Configured implements Tool {
         repo.shutDown();
 
         // Set up a MiniAccumulo cluster and set up conf to connect to it
-        String username = "root";
-        String password = "root";
-        MiniAccumuloCluster mini = new MiniAccumuloCluster(workingDir, password);
+        final String username = "root";
+        final String password = "root";
+        final MiniAccumuloCluster mini = new MiniAccumuloCluster(workingDir, password);
         mini.start();
         conf.set(MRUtils.AC_INSTANCE_PROP, mini.getInstanceName());
         conf.set(MRUtils.AC_ZK_PROP, mini.getZooKeepers());
@@ -207,7 +207,7 @@ public class ConformanceTest extends Configured implements Tool {
         conf.set(MRUtils.TABLE_PREFIX_PROPERTY, "temp_");
         // Run the conformance tests
         int result;
-        for (OwlTest test : conformanceTests) {
+        for (final OwlTest test : conformanceTests) {
             System.out.println(test.uri);
             result = runTest(conf, args, test);
             if (result != 0) {
@@ -225,14 +225,14 @@ public class ConformanceTest extends Configured implements Tool {
         mini.stop();
 
         System.out.println("\n" + successes.size() + " successful tests:");
-        for (OwlTest test : successes) {
+        for (final OwlTest test : successes) {
             System.out.println("\t[SUCCESS] " + test.type() + " " + test.name);
         }
         System.out.println("\n" + failures.size() + " failed tests:");
-        for (OwlTest test : failures) {
+        for (final OwlTest test : failures) {
             System.out.println("\t[FAIL] " + test.type() + " " + test.name);
             System.out.println("\t\t(" + test.description + ")");
-            for (Statement triple : test.error) {
+            for (final Statement triple : test.error) {
                 if (test.types.contains(TEST_ENTAILMENT)) {
                     System.out.println("\t\tExpected: " + triple);
                 }
@@ -250,23 +250,23 @@ public class ConformanceTest extends Configured implements Tool {
      * @param   OwlTest   Contains premise/conclusion graphs, will store result
      * @return  Return value of the MapReduce job
      */
-    int runTest(Configuration conf, String[] args, OwlTest test)
+    int runTest(final Configuration conf, final String[] args, final OwlTest test)
             throws Exception {
         conf.setInt(MRReasoningUtils.STEP_PROP, 0);
         conf.setInt(MRReasoningUtils.SCHEMA_UPDATE_PROP, 0);
         conf.setBoolean(MRReasoningUtils.DEBUG_FLAG, true);
         conf.setBoolean(MRReasoningUtils.OUTPUT_FLAG, true);
         // Connect to MiniAccumulo and load the test
-        Repository repo = MRReasoningUtils.getRepository(conf);
+        final Repository repo = MRReasoningUtils.getRepository(conf);
         repo.initialize();
-        RepositoryConnection conn = repo.getConnection();
+        final RepositoryConnection conn = repo.getConnection();
         conn.clear();
         conn.add(new StringReader(test.premise), "", RDFFormat.RDFXML);
         conn.close();
         repo.shutDown();
         // Run the reasoner
-        ReasoningDriver reasoner = new ReasoningDriver();
-        int result = ToolRunner.run(conf, reasoner, args);
+        final ReasoningDriver reasoner = new ReasoningDriver();
+        final int result = ToolRunner.run(conf, reasoner, args);
         test.success = (result == 0);
         // Inconsistency test: successful if determined inconsistent
         if (test.types.contains(TEST_INCONSISTENCY)) {
@@ -281,21 +281,21 @@ public class ConformanceTest extends Configured implements Tool {
             || test.types.contains(TEST_ENTAILMENT))  {
             System.out.println("Reading inferred triples...");
             // Read in the inferred triples from HDFS:
-            Schema schema = MRReasoningUtils.loadSchema(conf);
-            FileSystem fs = FileSystem.get(conf);
-            Path path = MRReasoningUtils.getOutputPath(conf, "final");
-            OutputCollector inferred = new OutputCollector();
-            NTriplesParser parser = new NTriplesParser();
+            final Schema schema = MRReasoningUtils.loadSchema(conf);
+            final FileSystem fs = FileSystem.get(conf);
+            final Path path = MRReasoningUtils.getOutputPath(conf, "final");
+            final OutputCollector inferred = new OutputCollector();
+            final NTriplesParser parser = new NTriplesParser();
             parser.setRDFHandler(inferred);
             if (fs.isDirectory(path)) {
-                for (FileStatus status : fs.listStatus(path)) {
-                    String s = status.getPath().getName();
+                for (final FileStatus status : fs.listStatus(path)) {
+                    final String s = status.getPath().getName();
                     if (s.startsWith(MRReasoningUtils.INCONSISTENT_OUT)
                         || s.startsWith(MRReasoningUtils.DEBUG_OUT)) {
                         continue;
                     }
-                    BufferedReader br = new BufferedReader(
-                        new InputStreamReader(fs.open(status.getPath())));
+                    final BufferedReader br = new BufferedReader(
+                        new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8));
                     parser.parse(br, "");
                     br.close();
                 }
@@ -306,8 +306,8 @@ public class ConformanceTest extends Configured implements Tool {
             if (test.types.contains(TEST_ENTAILMENT)) {
                 // Check expected inferences against the inferred triples and
                 // the schema reasoner
-                for (Statement st : test.expected) {
-                    Fact fact = new Fact(st);
+                for (final Statement st : test.expected) {
+                    final Fact fact = new Fact(st);
                     if (!test.inferred.contains(st)
                             && !triviallyTrue(fact.getTriple(), schema)
                             && !schema.containsTriple(fact.getTriple())) {
@@ -317,8 +317,8 @@ public class ConformanceTest extends Configured implements Tool {
             }
             // Non-entailment test: failure if non-expected triples inferred
             if (test.types.contains(TEST_NONENTAILMENT)) {
-                for (Statement st : test.unexpected) {
-                    Fact fact = new Fact(st);
+                for (final Statement st : test.unexpected) {
+                    final Fact fact = new Fact(st);
                     if (test.inferred.contains(st)
                         || schema.containsTriple(fact.getTriple())) {
                         test.error.add(st);
@@ -336,18 +336,18 @@ public class ConformanceTest extends Configured implements Tool {
      * Query a connection for conformance tests matching a particular
      * test type.
      */
-    Set<Value> getTestURIs(RepositoryConnection conn, String testType)
+    Set<Value> getTestURIs(final RepositoryConnection conn, final String testType)
             throws IOException, OpenRDFException {
-        Set<Value> testURIs = new HashSet<>();
-        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
+        final Set<Value> testURIs = new HashSet<>();
+        final TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
             "select ?test where { " +
             "?test <" + TYPE + "> <" + testType + "> .\n" +
             "?test <" + TEST_PROFILE + "> <" + TEST_RL + "> .\n" +
             "?test <" + TEST_SEMANTICS + "> <" + TEST_RDFBASED + "> .\n" +
             "}");
-        TupleQueryResult queryResult = query.evaluate();
+        final TupleQueryResult queryResult = query.evaluate();
         while (queryResult.hasNext()) {
-            BindingSet bindings = queryResult.next();
+            final BindingSet bindings = queryResult.next();
             testURIs.add(bindings.getValue("test"));
         }
         queryResult.close();
@@ -357,10 +357,10 @@ public class ConformanceTest extends Configured implements Tool {
     /**
      * Query a connection for conformance test details.
      */
-    Collection<OwlTest> getTests(RepositoryConnection conn, Set<Value> testURIs)
+    Collection<OwlTest> getTests(final RepositoryConnection conn, final Set<Value> testURIs)
             throws IOException, OpenRDFException {
-        Map<Value, OwlTest> tests = new HashMap<>();
-        TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
+        final Map<Value, OwlTest> tests = new HashMap<>();
+        final TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
             "select * where { " +
             "?test <" + TYPE + "> ?testType .\n" +
             "?test <" + TEST_PREMISE + "> ?graph .\n" +
@@ -371,10 +371,10 @@ public class ConformanceTest extends Configured implements Tool {
             "OPTIONAL {?test <" + TEST_CONCLUSION + "> ?conclusion .}\n" +
             "OPTIONAL {?test <" + TEST_NONCONCLUSION + "> ?nonentailed .}\n" +
             "}");
-        TupleQueryResult queryResult = query.evaluate();
+        final TupleQueryResult queryResult = query.evaluate();
         while (queryResult.hasNext()) {
-            BindingSet bindings = queryResult.next();
-            Value uri = bindings.getValue("test");
+            final BindingSet bindings = queryResult.next();
+            final Value uri = bindings.getValue("test");
             if (testURIs.contains(uri)) {
                 OwlTest test;
                 if (tests.containsKey(uri)) {
@@ -397,9 +397,9 @@ public class ConformanceTest extends Configured implements Tool {
                 test.types.add(bindings.getValue("testType").stringValue());
             }
         }
-        for (OwlTest test : tests.values()) {
+        for (final OwlTest test : tests.values()) {
             if (test.compareTo != null) {
-                RDFXMLParser parser = new RDFXMLParser();
+                final RDFXMLParser parser = new RDFXMLParser();
                 parser.setRDFHandler(test);
                 parser.parse(new StringReader(test.compareTo), "");
             }
@@ -413,10 +413,10 @@ public class ConformanceTest extends Configured implements Tool {
      * tests, such as an implicit "[bnode] type Ontology" triple or a
      * "[class] type Class" triple as long as the class exists.
      */
-    boolean triviallyTrue(Statement triple, Schema schema) {
-        Resource s = triple.getSubject();
-        URI p = triple.getPredicate();
-        Value o = triple.getObject();
+    boolean triviallyTrue(final Statement triple, final Schema schema) {
+        final Resource s = triple.getSubject();
+        final URI p = triple.getPredicate();
+        final Value o = triple.getObject();
         if (p.equals(RDF.TYPE)) {
             if (o.equals(OWL.ONTOLOGY)) {
                 return true;
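
The reader fix in this file follows the standard recipe: wrap the raw stream in an InputStreamReader that names its charset, since the one-argument constructor uses the JVM default. Against a local file instead of HDFS, the pattern is:

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class Utf8ReaderSketch {
        /** Opens a UTF-8 reader over a byte stream (stands in for fs.open(path) above). */
        static BufferedReader open(final String path) throws IOException {
            return new BufferedReader(
                    new InputStreamReader(new FileInputStream(path), StandardCharsets.UTF_8));
        }
    }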

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/mapreduce/src/main/java/org/apache/rya/accumulo/mr/GraphXEdgeInputFormat.java
----------------------------------------------------------------------
diff --git a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/GraphXEdgeInputFormat.java b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/GraphXEdgeInputFormat.java
index 489fd34..eac06c6 100644
--- a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/GraphXEdgeInputFormat.java
+++ b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/GraphXEdgeInputFormat.java
@@ -20,19 +20,12 @@ package org.apache.rya.accumulo.mr;
  */
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
-import org.apache.rya.api.domain.RyaStatement;
-import org.apache.rya.api.domain.RyaType;
-import org.apache.rya.api.resolver.RyaTripleContext;
-import org.apache.rya.api.resolver.triple.TripleRow;
-import org.apache.rya.api.resolver.triple.TripleRowResolverException;
-
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.mapreduce.AbstractInputFormat;
@@ -43,6 +36,13 @@ import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.RyaType;
+import org.apache.rya.api.resolver.RyaTripleContext;
+import org.apache.rya.api.resolver.triple.TripleRow;
+import org.apache.rya.api.resolver.triple.TripleRowResolverException;
 import org.apache.spark.graphx.Edge;
 
 /**
@@ -63,8 +63,8 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 	 * @return A RecordReader that can be used to fetch RyaStatementWritables.
 	 */
 	@Override
-	public RecordReader<Object, Edge> createRecordReader(InputSplit split,
-			TaskAttemptContext context) {
+	public RecordReader<Object, Edge> createRecordReader(final InputSplit split,
+			final TaskAttemptContext context) {
 		return new RyaStatementRecordReader();
 	}
 
@@ -77,7 +77,7 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 	 *            Statements will be read from the Rya table associated with
 	 *            this layout.
 	 */
-	public static void setTableLayout(Job conf, TABLE_LAYOUT layout) {
+	public static void setTableLayout(final Job conf, final TABLE_LAYOUT layout) {
 		conf.getConfiguration().set(MRUtils.TABLE_LAYOUT_PROP, layout.name());
 	}
 
@@ -89,8 +89,8 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 		private RyaTripleContext ryaContext;
 		private TABLE_LAYOUT tableLayout;
 
-		protected void setupIterators(TaskAttemptContext context,
-				Scanner scanner, String tableName, RangeInputSplit split) {
+		protected void setupIterators(final TaskAttemptContext context,
+				final Scanner scanner, final String tableName, final RangeInputSplit split) {
 		}
 
 		/**
@@ -104,7 +104,7 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 		 *             if thrown by the superclass's initialize method.
 		 */
 		@Override
-		public void initialize(InputSplit inSplit, TaskAttemptContext attempt)
+		public void initialize(final InputSplit inSplit, final TaskAttemptContext attempt)
 				throws IOException {
 			super.initialize(inSplit, attempt);
 			this.tableLayout = MRUtils.getTableLayout(
@@ -127,15 +127,16 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 		 */
 		@Override
 		public boolean nextKeyValue() throws IOException {
-			if (!scannerIterator.hasNext())
-				return false;
-			Entry<Key, Value> entry = scannerIterator.next();
+			if (!scannerIterator.hasNext()) {
+                return false;
+            }
+			final Entry<Key, Value> entry = scannerIterator.next();
 			++numKeysRead;
 			currentKey = entry.getKey();
 			try {
 				currentK = currentKey.getRow();
-				RyaTypeWritable rtw = new RyaTypeWritable();
-				RyaStatement stmt = this.ryaContext.deserializeTriple(
+				final RyaTypeWritable rtw = new RyaTypeWritable();
+				final RyaStatement stmt = this.ryaContext.deserializeTriple(
 						this.tableLayout, new TripleRow(entry.getKey().getRow()
 								.getBytes(), entry.getKey().getColumnFamily()
 								.getBytes(), entry.getKey()
@@ -144,28 +145,28 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 								.getColumnVisibility().getBytes(), entry
 								.getValue().get()));
 
-				long subHash = getVertexId(stmt.getSubject());
-				long objHash = getVertexId(stmt.getObject());
+				final long subHash = getVertexId(stmt.getSubject());
+				final long objHash = getVertexId(stmt.getObject());
 				rtw.setRyaType(stmt.getPredicate());
 
-				Edge<RyaTypeWritable> writable = new Edge<RyaTypeWritable>(
+				final Edge<RyaTypeWritable> writable = new Edge<RyaTypeWritable>(
 						subHash, objHash, rtw);
 				currentV = writable;
-			} catch (TripleRowResolverException e) {
+			} catch (final TripleRowResolverException e) {
 				throw new IOException(e);
 			}
 			return true;
 		}
 
 		protected List<IteratorSetting> contextIterators(
-				TaskAttemptContext context, String tableName) {
+				final TaskAttemptContext context, final String tableName) {
 			return getIterators(context);
 		}
 
 		@Override
-		protected void setupIterators(TaskAttemptContext context,
-				Scanner scanner, String tableName,
-				org.apache.accumulo.core.client.mapreduce.RangeInputSplit split) {
+		protected void setupIterators(final TaskAttemptContext context,
+				final Scanner scanner, final String tableName,
+				final org.apache.accumulo.core.client.mapreduce.RangeInputSplit split) {
 			List<IteratorSetting> iterators = null;
 
 			if (null == split) {
@@ -177,13 +178,14 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 				}
 			}
 
-			for (IteratorSetting iterator : iterators)
-				scanner.addScanIterator(iterator);
+			for (final IteratorSetting iterator : iterators) {
+                scanner.addScanIterator(iterator);
+            }
 		}
 
 	}
 
-	public static long getVertexId(RyaType resource) throws IOException {
+	public static long getVertexId(final RyaType resource) throws IOException {
 		String uri = "";
 		if (resource != null) {
 			uri = resource.getData().toString();
@@ -193,20 +195,20 @@ public class GraphXEdgeInputFormat extends InputFormatBase<Object, Edge> {
 			// the digested string, the collision ratio is less than 0.0001%
 			// using custom hash function should significantly reduce the
 			// collision ratio
-			MessageDigest messageDigest = MessageDigest
+			final MessageDigest messageDigest = MessageDigest
 					.getInstance("SHA-256");
-			messageDigest.update(uri.getBytes());
-			String encryptedString = new String(messageDigest.digest());
+			messageDigest.update(uri.getBytes(StandardCharsets.UTF_8));
+			final String encryptedString = new String(messageDigest.digest(), StandardCharsets.UTF_8);
 			return hash(encryptedString);
 		}
-		catch (NoSuchAlgorithmException e) {
+		catch (final NoSuchAlgorithmException e) {
 			throw new IOException(e);
 		}
 	}
 
-	public static long hash(String string) {
+	public static long hash(final String string) {
 		long h = 1125899906842597L; // prime
-		int len = string.length();
+		final int len = string.length();
 
 		for (int i = 0; i < len; i++) {
 			h = 31 * h + string.charAt(i);
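
One caveat worth noting on getVertexId: a SHA-256 digest is arbitrary binary, so decoding it as UTF-8 is not a faithful round-trip. Pinning the charset makes the resulting vertex id deterministic across JVMs, which is what matters here, but it does not make the decode lossless. The custom polynomial hash itself is self-contained:

    public class VertexHashSketch {
        /** 31-based polynomial string hash seeded with a large prime, as in the class above. */
        static long hash(final String string) {
            long h = 1125899906842597L; // prime
            for (int i = 0; i < string.length(); i++) {
                h = 31 * h + string.charAt(i);
            }
            return h;
        }
    }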

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/mapreduce/src/main/java/org/apache/rya/accumulo/mr/RyaOutputFormat.java
----------------------------------------------------------------------
diff --git a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/RyaOutputFormat.java b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/RyaOutputFormat.java
index 5332260..0d42df2 100644
--- a/mapreduce/src/main/java/org/apache/rya/accumulo/mr/RyaOutputFormat.java
+++ b/mapreduce/src/main/java/org/apache/rya/accumulo/mr/RyaOutputFormat.java
@@ -22,6 +22,7 @@ package org.apache.rya.accumulo.mr;
 import java.io.Closeable;
 import java.io.Flushable;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.concurrent.TimeUnit;
 
@@ -94,7 +95,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param visibility A comma-separated list of authorizations.
      */
-    public static void setDefaultVisibility(Job job, String visibility) {
+    public static void setDefaultVisibility(final Job job, final String visibility) {
         if (visibility != null) {
             job.getConfiguration().set(CV_PROPERTY, visibility);
         }
@@ -107,7 +108,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param context A context string, should be a syntactically valid URI.
      */
-    public static void setDefaultContext(Job job, String context) {
+    public static void setDefaultContext(final Job job, final String context) {
         if (context != null) {
             job.getConfiguration().set(CONTEXT_PROPERTY, context);
         }
@@ -118,7 +119,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param prefix The common prefix to all rya tables that output will be written to.
      */
-    public static void setTablePrefix(Job job, String prefix) {
+    public static void setTablePrefix(final Job job, final String prefix) {
         job.getConfiguration().set(OUTPUT_PREFIX_PROPERTY, prefix);
     }
 
@@ -127,7 +128,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param enable Whether this job should add its output statements to the free text index.
      */
-    public static void setFreeTextEnabled(Job job, boolean enable) {
+    public static void setFreeTextEnabled(final Job job, final boolean enable) {
         job.getConfiguration().setBoolean(ENABLE_FREETEXT, enable);
     }
 
@@ -136,7 +137,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param enable Whether this job should add its output statements to the temporal index.
      */
-    public static void setTemporalEnabled(Job job, boolean enable) {
+    public static void setTemporalEnabled(final Job job, final boolean enable) {
         job.getConfiguration().setBoolean(ENABLE_TEMPORAL, enable);
     }
 
@@ -145,7 +146,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param enable Whether this job should add its output statements to the entity-centric index.
      */
-    public static void setEntityEnabled(Job job, boolean enable) {
+    public static void setEntityEnabled(final Job job, final boolean enable) {
         job.getConfiguration().setBoolean(ENABLE_ENTITY, enable);
     }
 
@@ -154,7 +155,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to apply the setting to.
      * @param enable Whether this job should output to the core tables.
      */
-    public static void setCoreTablesEnabled(Job job, boolean enable) {
+    public static void setCoreTablesEnabled(final Job job, final boolean enable) {
         job.getConfiguration().setBoolean(ENABLE_CORE, enable);
     }
 
@@ -163,7 +164,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @param job Job to configure
      * @param instance Name of the mock instance
      */
-    public static void setMockInstance(Job job, String instance) {
+    public static void setMockInstance(final Job job, final String instance) {
         AccumuloOutputFormat.setMockInstance(job, instance);
         job.getConfiguration().setBoolean(ConfigUtils.USE_MOCK_INSTANCE, true);
         job.getConfiguration().setBoolean(MRUtils.AC_MOCK_PROP, true);
@@ -175,8 +176,8 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @throws  IOException if initializing the core Rya indexer fails.
      */
     @Override
-    public void checkOutputSpecs(JobContext jobContext) throws IOException {
-        Configuration conf = jobContext.getConfiguration();
+    public void checkOutputSpecs(final JobContext jobContext) throws IOException {
+        final Configuration conf = jobContext.getConfiguration();
         // make sure that all of the indexers can connect
         getFreeTextIndexer(conf);
         getTemporalIndexer(conf);
@@ -189,7 +190,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @return  A committer whose method implementations are empty.
      */
     @Override
-    public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
+    public OutputCommitter getOutputCommitter(final TaskAttemptContext context) throws IOException, InterruptedException {
         // copied from AccumuloOutputFormat
         return new NullOutputFormat<Text, Mutation>().getOutputCommitter(context);
     }
@@ -201,16 +202,16 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
      * @throws  IOException if any enabled indexers can't be initialized
      */
     @Override
-    public RecordWriter<Writable, RyaStatementWritable> getRecordWriter(TaskAttemptContext context) throws IOException {
+    public RecordWriter<Writable, RyaStatementWritable> getRecordWriter(final TaskAttemptContext context) throws IOException {
         return new RyaRecordWriter(context);
     }
 
 
-    private static FreeTextIndexer getFreeTextIndexer(Configuration conf) throws IOException {
+    private static FreeTextIndexer getFreeTextIndexer(final Configuration conf) throws IOException {
         if (!conf.getBoolean(ENABLE_FREETEXT, true)) {
             return null;
         }
-        AccumuloFreeTextIndexer freeText = new AccumuloFreeTextIndexer();
+        final AccumuloFreeTextIndexer freeText = new AccumuloFreeTextIndexer();
         freeText.setConf(conf);
         Connector connector;
         try {
@@ -218,7 +219,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
         } catch (AccumuloException | AccumuloSecurityException e) {
             throw new IOException("Error when attempting to create a connection for writing the freeText index.", e);
         }
-        MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(new BatchWriterConfig());
+        final MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(new BatchWriterConfig());
         freeText.setConnector(connector);
         freeText.setMultiTableBatchWriter(mtbw);
         freeText.init();
@@ -226,11 +227,11 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
         return freeText;
     }
 
-    private static TemporalIndexer getTemporalIndexer(Configuration conf) throws IOException {
+    private static TemporalIndexer getTemporalIndexer(final Configuration conf) throws IOException {
         if (!conf.getBoolean(ENABLE_TEMPORAL, true)) {
             return null;
         }
-        AccumuloTemporalIndexer temporal = new AccumuloTemporalIndexer();
+        final AccumuloTemporalIndexer temporal = new AccumuloTemporalIndexer();
         temporal.setConf(conf);
         Connector connector;
         try {
@@ -238,34 +239,34 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
         } catch (AccumuloException | AccumuloSecurityException e) {
             throw new IOException("Error when attempting to create a connection for writing the temporal index.", e);
         }
-        MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(new BatchWriterConfig());
+        final MultiTableBatchWriter mtbw = connector.createMultiTableBatchWriter(new BatchWriterConfig());
         temporal.setConnector(connector);
         temporal.setMultiTableBatchWriter(mtbw);
         temporal.init();
         return temporal;
     }
 
-    private static EntityCentricIndex getEntityIndexer(Configuration conf) {
+    private static EntityCentricIndex getEntityIndexer(final Configuration conf) {
         if (!conf.getBoolean(ENABLE_ENTITY, true)) {
             return null;
         }
-        EntityCentricIndex entity = new EntityCentricIndex();
+        final EntityCentricIndex entity = new EntityCentricIndex();
         entity.setConf(conf);
         return entity;
     }
 
-    private static AccumuloRyaDAO getRyaIndexer(Configuration conf) throws IOException {
+    private static AccumuloRyaDAO getRyaIndexer(final Configuration conf) throws IOException {
         try {
             if (!conf.getBoolean(ENABLE_CORE, true)) {
                 return null;
             }
-            AccumuloRyaDAO ryaIndexer = new AccumuloRyaDAO();
-            Connector conn = ConfigUtils.getConnector(conf);
+            final AccumuloRyaDAO ryaIndexer = new AccumuloRyaDAO();
+            final Connector conn = ConfigUtils.getConnector(conf);
             ryaIndexer.setConnector(conn);
 
-            AccumuloRdfConfiguration ryaConf = new AccumuloRdfConfiguration();
+            final AccumuloRdfConfiguration ryaConf = new AccumuloRdfConfiguration();
 
-            String tablePrefix = conf.get(OUTPUT_PREFIX_PROPERTY, null);
+            final String tablePrefix = conf.get(OUTPUT_PREFIX_PROPERTY, null);
             if (tablePrefix != null) {
                 ryaConf.setTablePrefix(tablePrefix);
             }
@@ -273,13 +274,13 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
             ryaIndexer.setConf(ryaConf);
             ryaIndexer.init();
             return ryaIndexer;
-        } catch (AccumuloException e) {
+        } catch (final AccumuloException e) {
             logger.error("Cannot create RyaIndexer", e);
             throw new IOException(e);
-        } catch (AccumuloSecurityException e) {
+        } catch (final AccumuloSecurityException e) {
             logger.error("Cannot create RyaIndexer", e);
             throw new IOException(e);
-        } catch (RyaDAOException e) {
+        } catch (final RyaDAOException e) {
             logger.error("Cannot create RyaIndexer", e);
             throw new IOException(e);
         }
@@ -293,11 +294,11 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
             implements Closeable, Flushable {
         private static final Logger logger = Logger.getLogger(RyaRecordWriter.class);
 
-        private FreeTextIndexer freeTextIndexer;
-        private TemporalIndexer temporalIndexer;
-        private EntityCentricIndex entityIndexer;
-        private AccumuloRyaDAO ryaIndexer;
-        private RyaTripleContext tripleContext;
+        private final FreeTextIndexer freeTextIndexer;
+        private final TemporalIndexer temporalIndexer;
+        private final EntityCentricIndex entityIndexer;
+        private final AccumuloRyaDAO ryaIndexer;
+        private final RyaTripleContext tripleContext;
         private MultiTableBatchWriter writer;
         private byte[] cv = AccumuloRdfConstants.EMPTY_CV.getExpression();
         private RyaURI defaultContext = null;
@@ -305,10 +306,10 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
         private static final long ONE_MEGABYTE = 1024L * 1024L;
         private static final long AVE_STATEMENT_SIZE = 100L;
 
-        private long bufferSizeLimit;
+        private final long bufferSizeLimit;
         private long bufferCurrentSize = 0;
 
-        private ArrayList<RyaStatement> buffer;
+        private final ArrayList<RyaStatement> buffer;
 
         /**
          * Constructor.
@@ -316,7 +317,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @throws  IOException if the core Rya indexer or entity indexer can't
          *          be initialized
          */
-        public RyaRecordWriter(TaskAttemptContext context) throws IOException {
+        public RyaRecordWriter(final TaskAttemptContext context) throws IOException {
             this(context.getConfiguration());
         }
 
@@ -326,21 +327,21 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @throws  IOException if the core Rya indexer or entity indexer can't
          *          be initialized
          */
-        public RyaRecordWriter(Configuration conf) throws IOException {
+        public RyaRecordWriter(final Configuration conf) throws IOException {
             // set the visibility
-            String visibility = conf.get(CV_PROPERTY);
+            final String visibility = conf.get(CV_PROPERTY);
             if (visibility != null) {
-                cv = visibility.getBytes();
+                cv = visibility.getBytes(StandardCharsets.UTF_8);
             }
             // set the default context
-            String context = conf.get(CONTEXT_PROPERTY, "");
+            final String context = conf.get(CONTEXT_PROPERTY, "");
             if (context != null && !context.isEmpty()) {
                 defaultContext = new RyaURI(context);
             }
 
             // set up the buffer
             bufferSizeLimit = conf.getLong(MAX_MUTATION_BUFFER_SIZE, ONE_MEGABYTE);
-            int bufferCapacity = (int) (bufferSizeLimit / AVE_STATEMENT_SIZE);
+            final int bufferCapacity = (int) (bufferSizeLimit / AVE_STATEMENT_SIZE);
             buffer = new ArrayList<RyaStatement>(bufferCapacity);
 
             // set up the indexers
@@ -358,7 +359,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
                 } catch (AccumuloException | AccumuloSecurityException e) {
                     throw new IOException("Error connecting to Accumulo for entity index output", e);
                 }
-                BatchWriterConfig batchWriterConfig = new BatchWriterConfig();
+                final BatchWriterConfig batchWriterConfig = new BatchWriterConfig();
                 batchWriterConfig.setMaxMemory(RdfCloudTripleStoreConstants.MAX_MEMORY);
                 batchWriterConfig.setTimeout(RdfCloudTripleStoreConstants.MAX_TIME, TimeUnit.MILLISECONDS);
                 batchWriterConfig.setMaxWriteThreads(RdfCloudTripleStoreConstants.NUM_THREADS);
@@ -396,41 +397,45 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @param   paramTaskAttemptContext     Unused.
          */
         @Override
-        public void close(TaskAttemptContext paramTaskAttemptContext) {
+        public void close(final TaskAttemptContext paramTaskAttemptContext) {
             // close everything. log errors
             try {
                 flush();
-            } catch (IOException e) {
+            } catch (final IOException e) {
                 logger.error("Error flushing the buffer on RyaOutputFormat Close", e);
             }
             try {
-                if (freeTextIndexer != null)
+                if (freeTextIndexer != null) {
                     freeTextIndexer.close();
-            } catch (IOException e) {
+                }
+            } catch (final IOException e) {
                 logger.error("Error closing the freetextIndexer on RyaOutputFormat Close", e);
             }
             try {
-                if (temporalIndexer != null)
+                if (temporalIndexer != null) {
                     temporalIndexer.close();
-            } catch (IOException e) {
+                }
+            } catch (final IOException e) {
                 logger.error("Error closing the temporalIndexer on RyaOutputFormat Close", e);
             }
             try {
-                if (entityIndexer != null)
+                if (entityIndexer != null) {
                     entityIndexer.close();
-            } catch (IOException e) {
+                }
+            } catch (final IOException e) {
                 logger.error("Error closing the entityIndexer on RyaOutputFormat Close", e);
             }
             try {
-                if (ryaIndexer != null)
+                if (ryaIndexer != null) {
                     ryaIndexer.destroy();
-            } catch (RyaDAOException e) {
+                }
+            } catch (final RyaDAOException e) {
                 logger.error("Error closing RyaDAO on RyaOutputFormat Close", e);
             }
             if (writer != null) {
                 try {
                     writer.close();
-                } catch (MutationsRejectedException e) {
+                } catch (final MutationsRejectedException e) {
                     logger.error("Error closing MultiTableBatchWriter on RyaOutputFormat Close", e);
                 }
             }
@@ -442,7 +447,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @param   statement   Statement to insert to Rya.
          * @throws  IOException if writing to Accumulo fails.
          */
-        public void write(Statement statement) throws IOException {
+        public void write(final Statement statement) throws IOException {
             write(RdfToRyaConversions.convertStatement(statement));
         }
 
@@ -452,7 +457,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @param   ryaStatement   Statement to insert to Rya.
          * @throws  IOException if writing to Accumulo fails.
          */
-        public void write(RyaStatement ryaStatement) throws IOException {
+        public void write(final RyaStatement ryaStatement) throws IOException {
             write(NullWritable.get(), new RyaStatementWritable(ryaStatement, tripleContext));
         }
 
@@ -464,8 +469,8 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
          * @throws  IOException if writing to Accumulo fails.
          */
         @Override
-        public void write(Writable key, RyaStatementWritable value) throws IOException {
-            RyaStatement ryaStatement = value.getRyaStatement();
+        public void write(final Writable key, final RyaStatementWritable value) throws IOException {
+            final RyaStatement ryaStatement = value.getRyaStatement();
             if (ryaStatement.getColumnVisibility() == null) {
                 ryaStatement.setColumnVisibility(cv);
             }
@@ -479,11 +484,11 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
             }
         }
 
-        private int statementSize(RyaStatement ryaStatement) {
-            RyaURI subject = ryaStatement.getSubject();
-            RyaURI predicate = ryaStatement.getPredicate();
-            RyaType object = ryaStatement.getObject();
-            RyaURI context = ryaStatement.getContext();
+        private int statementSize(final RyaStatement ryaStatement) {
+            final RyaURI subject = ryaStatement.getSubject();
+            final RyaURI predicate = ryaStatement.getPredicate();
+            final RyaType object = ryaStatement.getObject();
+            final RyaURI context = ryaStatement.getContext();
             int size = 3 + subject.getData().length() + predicate.getData().length() + object.getData().length();
             if (!XMLSchema.ANYURI.equals(object.getDataType())) {
                 size += 2 + object.getDataType().toString().length();
@@ -508,15 +513,15 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
             totalCommitRecords += buffer.size();
             commitCount++;
 
-            long startCommitTime = System.currentTimeMillis();
+            final long startCommitTime = System.currentTimeMillis();
 
             logger.info(String.format("(C-%d) Flushing buffer with %,d objects and %,d bytes", commitCount, buffer.size(),
                     bufferCurrentSize));
 
-            double readingDuration = (startCommitTime - lastCommitFinishTime) / 1000.;
+            final double readingDuration = (startCommitTime - lastCommitFinishTime) / 1000.;
             totalReadDuration += readingDuration;
-            double currentReadRate = buffer.size() / readingDuration;
-            double totalReadRate = totalCommitRecords / totalReadDuration;
+            final double currentReadRate = buffer.size() / readingDuration;
+            final double totalReadRate = totalCommitRecords / totalReadDuration;
 
             // Print "reading" metrics
             logger.info(String.format("(C-%d) (Reading) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, readingDuration,
@@ -539,7 +544,7 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
                 entityIndexer.storeStatements(buffer);
                 try {
                     writer.flush();
-                } catch (MutationsRejectedException e) {
+                } catch (final MutationsRejectedException e) {
                     throw new IOException("Error flushing data to Accumulo for entity indexing", e);
                 }
             }
@@ -549,26 +554,26 @@ public class RyaOutputFormat extends OutputFormat<Writable, RyaStatementWritable
                 if (ryaIndexer != null) {
                     ryaIndexer.add(buffer.iterator());
                 }
-            } catch (RyaDAOException e) {
+            } catch (final RyaDAOException e) {
                 logger.error("Cannot write statement to Rya", e);
                 throw new IOException(e);
             }
 
             lastCommitFinishTime = System.currentTimeMillis();
 
-            double writingDuration = (lastCommitFinishTime - startCommitTime) / 1000.;
+            final double writingDuration = (lastCommitFinishTime - startCommitTime) / 1000.;
             totalWriteDuration += writingDuration;
-            double currentWriteRate = buffer.size() / writingDuration;
-            double totalWriteRate = totalCommitRecords / totalWriteDuration;
+            final double currentWriteRate = buffer.size() / writingDuration;
+            final double totalWriteRate = totalCommitRecords / totalWriteDuration;
 
             // Print "writing" stats
             logger.info(String.format("(C-%d) (Writing) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, writingDuration,
                     currentWriteRate, totalWriteRate));
 
-            double processDuration = writingDuration + readingDuration;
-            double totalProcessDuration = totalWriteDuration + totalReadDuration;
-            double currentProcessRate = buffer.size() / processDuration;
-            double totalProcessRate = totalCommitRecords / (totalProcessDuration);
+            final double processDuration = writingDuration + readingDuration;
+            final double totalProcessDuration = totalWriteDuration + totalReadDuration;
+            final double currentProcessRate = buffer.size() / processDuration;
+            final double totalProcessRate = totalCommitRecords / (totalProcessDuration);
 
             // Print "total" stats
             logger.info(String.format("(C-%d) (Total) Duration, Current Rate, Total Rate: %.2f %.2f %.2f ", commitCount, processDuration,


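For reference, the pattern this commit applies throughout (e.g. cv = visibility.getBytes(StandardCharsets.UTF_8) in RyaRecordWriter above) is to pin every byte/String conversion to an explicit charset. A minimal, self-contained sketch of why the default-charset overloads are bugs; the class name and sample value below are illustrative, JDK only:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class DefaultCharsetSketch {
        public static void main(String[] args) {
            String visibility = "grüppe&admin";           // non-ASCII exercises the bug
            byte[] platform = visibility.getBytes();      // varies with the JVM's file.encoding
            byte[] pinned = visibility.getBytes(StandardCharsets.UTF_8);
            // On a UTF-8 JVM these match; on e.g. windows-1252 they do not,
            // so two nodes could persist different bytes for the same statement.
            System.out.println(Arrays.equals(platform, pinned));
            // The decode side must be pinned as well, or round trips corrupt:
            System.out.println(new String(pinned, StandardCharsets.UTF_8));
        }
    }
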
[06/11] incubator-rya git commit: RYA-401 Fixed all default charset bugs. Closes #243.

Posted by ca...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java b/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
index 9688fec..5907b5b 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowHashedTripleResolver.java
@@ -1,18 +1,3 @@
-package org.apache.rya.api.resolver.triple.impl;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
-
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.codec.binary.Hex;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -21,9 +6,9 @@ import org.apache.commons.codec.binary.Hex;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -31,11 +16,21 @@ import org.apache.commons.codec.binary.Hex;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.resolver.triple.impl;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
 
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 
-import com.google.common.primitives.Bytes;
-
+import org.apache.commons.codec.binary.Hex;
 import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
 import org.apache.rya.api.domain.RyaStatement;
 import org.apache.rya.api.domain.RyaType;
@@ -46,6 +41,8 @@ import org.apache.rya.api.resolver.triple.TripleRow;
 import org.apache.rya.api.resolver.triple.TripleRowResolver;
 import org.apache.rya.api.resolver.triple.TripleRowResolverException;
 
+import com.google.common.primitives.Bytes;
+
 /**
  * Will store triple in spo, po, osp. Storing everything in the whole row.
  * Date: 7/13/12
@@ -54,33 +51,33 @@ import org.apache.rya.api.resolver.triple.TripleRowResolverException;
 public class WholeRowHashedTripleResolver implements TripleRowResolver {
 
     @Override
-    public Map<TABLE_LAYOUT, TripleRow> serialize(RyaStatement stmt) throws TripleRowResolverException {
+    public Map<TABLE_LAYOUT, TripleRow> serialize(final RyaStatement stmt) throws TripleRowResolverException {
         try {
-            RyaURI subject = stmt.getSubject();
-            RyaURI predicate = stmt.getPredicate();
-            RyaType object = stmt.getObject();
-            RyaURI context = stmt.getContext();
-            Long timestamp = stmt.getTimestamp();
-            byte[] columnVisibility = stmt.getColumnVisibility();
-            String qualifer = stmt.getQualifer();
-            byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes();
-            byte[] value = stmt.getValue();
+            final RyaURI subject = stmt.getSubject();
+            final RyaURI predicate = stmt.getPredicate();
+            final RyaType object = stmt.getObject();
+            final RyaURI context = stmt.getContext();
+            final Long timestamp = stmt.getTimestamp();
+            final byte[] columnVisibility = stmt.getColumnVisibility();
+            final String qualifer = stmt.getQualifer();
+            final byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes(StandardCharsets.UTF_8);
+            final byte[] value = stmt.getValue();
             assert subject != null && predicate != null && object != null;
-            byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes();
-            Map<TABLE_LAYOUT, TripleRow> tripleRowMap = new HashMap<TABLE_LAYOUT, TripleRow>();
-            MessageDigest md = MessageDigest.getInstance("MD5");
-            byte[] subjBytes = subject.getData().getBytes();
-            byte[] subjHashBytes = md.digest(subjBytes);
-            byte[] predBytes = predicate.getData().getBytes();
-            byte[] predHashBytes = md.digest(predBytes);
-            byte[][] objBytes = RyaContext.getInstance().serializeType(object);
+            final byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes(StandardCharsets.UTF_8);
+            final Map<TABLE_LAYOUT, TripleRow> tripleRowMap = new HashMap<TABLE_LAYOUT, TripleRow>();
+            final MessageDigest md = MessageDigest.getInstance("MD5");
+            final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+            final byte[] subjHashBytes = md.digest(subjBytes);
+            final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+            final byte[] predHashBytes = md.digest(predBytes);
+            final byte[][] objBytes = RyaContext.getInstance().serializeType(object);
             tripleRowMap.put(TABLE_LAYOUT.SPO,
-                    new TripleRow(Bytes.concat(Hex.encodeHexString(subjHashBytes).getBytes(), DELIM_BYTES, subjBytes, DELIM_BYTES,
+                    new TripleRow(Bytes.concat(Hex.encodeHexString(subjHashBytes).getBytes(StandardCharsets.UTF_8), DELIM_BYTES, subjBytes, DELIM_BYTES,
                             predBytes, DELIM_BYTES,
                             objBytes[0], objBytes[1]), cf, qualBytes,
                             timestamp, columnVisibility, value));
             tripleRowMap.put(TABLE_LAYOUT.PO,
-                    new TripleRow(Bytes.concat(Hex.encodeHexString(predHashBytes).getBytes(), DELIM_BYTES, predBytes, DELIM_BYTES,
+                    new TripleRow(Bytes.concat(Hex.encodeHexString(predHashBytes).getBytes(StandardCharsets.UTF_8), DELIM_BYTES, predBytes, DELIM_BYTES,
                             objBytes[0], DELIM_BYTES,
                             subjBytes, objBytes[1]), cf, qualBytes,
                             timestamp, columnVisibility, value));
@@ -90,68 +87,68 @@ public class WholeRowHashedTripleResolver implements TripleRowResolver {
                             predBytes, objBytes[1]), cf, qualBytes,
                             timestamp, columnVisibility, value));
             return tripleRowMap;
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new TripleRowResolverException(e);
-        } catch (NoSuchAlgorithmException e) {
+        } catch (final NoSuchAlgorithmException e) {
         	throw new TripleRowResolverException(e);
 		}
     }
 
     @Override
-    public RyaStatement deserialize(TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException {
+    public RyaStatement deserialize(final TABLE_LAYOUT table_layout, final TripleRow tripleRow) throws TripleRowResolverException {
         try {
             assert tripleRow != null && table_layout != null;
             byte[] row = tripleRow.getRow();
-            
+
             // if it is a hashed row, only keep the row after the hash
             if ((table_layout == TABLE_LAYOUT.SPO) || (table_layout == TABLE_LAYOUT.PO)) {
-            	int hashStart = Bytes.indexOf(row, DELIM_BYTE);
+            	final int hashStart = Bytes.indexOf(row, DELIM_BYTE);
             	row = Arrays.copyOfRange(row, hashStart + 1, row.length);
             }
-            
-            int firstIndex = Bytes.indexOf(row, DELIM_BYTE); 
-            byte[] first= Arrays.copyOf(row, firstIndex);
-            int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE);
-            int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE);
-            byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex);
-            byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex);
-            byte[] type = Arrays.copyOfRange(row, typeIndex, row.length);
-            byte[] columnFamily = tripleRow.getColumnFamily();
-            boolean contextExists = columnFamily != null && columnFamily.length > 0;
-            RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily))) : null;
-            byte[] columnQualifier = tripleRow.getColumnQualifier();
-            String qualifier = columnQualifier != null && columnQualifier.length > 0 ? new String(columnQualifier) : null;
-            Long timestamp = tripleRow.getTimestamp();
-            byte[] columnVisibility = tripleRow.getColumnVisibility();
-            byte[] value = tripleRow.getValue();
+
+            final int firstIndex = Bytes.indexOf(row, DELIM_BYTE);
+            final byte[] first= Arrays.copyOf(row, firstIndex);
+            final int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE);
+            final int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE);
+            final byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex);
+            final byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex);
+            final byte[] type = Arrays.copyOfRange(row, typeIndex, row.length);
+            final byte[] columnFamily = tripleRow.getColumnFamily();
+            final boolean contextExists = columnFamily != null && columnFamily.length > 0;
+            final RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily, StandardCharsets.UTF_8))) : null;
+            final byte[] columnQualifier = tripleRow.getColumnQualifier();
+            final String qualifier = columnQualifier != null && columnQualifier.length > 0 ? new String(columnQualifier, StandardCharsets.UTF_8) : null;
+            final Long timestamp = tripleRow.getTimestamp();
+            final byte[] columnVisibility = tripleRow.getColumnVisibility();
+            final byte[] value = tripleRow.getValue();
 
             switch (table_layout) {
                 case SPO: {
-                    byte[] obj = Bytes.concat(third, type);
+                    final byte[] obj = Bytes.concat(third, type);
                     return new RyaStatement(
-                            new RyaURI(new String(first)),
-                            new RyaURI(new String(second)),
+                            new RyaURI(new String(first, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(second, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
                 case PO: {
-                    byte[] obj = Bytes.concat(second, type);
+                    final byte[] obj = Bytes.concat(second, type);
                     return new RyaStatement(
-                            new RyaURI(new String(third)),
-                            new RyaURI(new String(first)),
+                            new RyaURI(new String(third, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(first, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
                 case OSP: {
-                    byte[] obj = Bytes.concat(first, type);
+                    final byte[] obj = Bytes.concat(first, type);
                     return new RyaStatement(
-                            new RyaURI(new String(second)),
-                            new RyaURI(new String(third)),
+                            new RyaURI(new String(second, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(third, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
             }
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new TripleRowResolverException(e);
         }
         throw new TripleRowResolverException("TripleRow[" + tripleRow + "] with Table layout[" + table_layout + "] is not deserializable");

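The serialize() hunk above builds the SPO row as hex(MD5(subject)) + DELIM + subject + DELIM + predicate + DELIM + object. A simplified sketch of that layout using the same Guava and commons-codec calls; the 0x00 delimiter and literal URIs are stand-ins for Rya's DELIM_BYTES and real statement data, and the typed object serialization from RyaContext is omitted:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    import org.apache.commons.codec.binary.Hex;

    import com.google.common.primitives.Bytes;

    public class HashedSpoRowSketch {
        public static void main(String[] args) throws NoSuchAlgorithmException {
            byte[] delim = { 0x00 };                      // stand-in for DELIM_BYTES
            byte[] subj = "urn:alice".getBytes(StandardCharsets.UTF_8);
            byte[] pred = "urn:knows".getBytes(StandardCharsets.UTF_8);
            byte[] obj = "urn:bob".getBytes(StandardCharsets.UTF_8);
            MessageDigest md = MessageDigest.getInstance("MD5");
            // hex(md5(subject)) | subject | predicate | object
            byte[] spoRow = Bytes.concat(
                    Hex.encodeHexString(md.digest(subj)).getBytes(StandardCharsets.UTF_8),
                    delim, subj, delim, pred, delim, obj);
            System.out.println(spoRow.length + " row bytes");
        }
    }

Hashing only the leading component spreads rows across tablets when subjects (or predicates, in the PO table) are skewed, while keeping the original bytes in the row so deserialize() can recover them.
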
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
----------------------------------------------------------------------
diff --git a/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowTripleResolver.java b/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
index 792d02e..cbd65b6 100644
--- a/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
+++ b/common/rya.api/src/main/java/org/apache/rya/api/resolver/triple/impl/WholeRowTripleResolver.java
@@ -1,5 +1,3 @@
-package org.apache.rya.api.resolver.triple.impl;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.api.resolver.triple.impl;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,10 +16,19 @@ package org.apache.rya.api.resolver.triple.impl;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.api.resolver.triple.impl;
 
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.EMPTY_BYTES;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
 
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
 
-import com.google.common.primitives.Bytes;
+import org.apache.rya.api.RdfCloudTripleStoreConstants.TABLE_LAYOUT;
 import org.apache.rya.api.domain.RyaStatement;
 import org.apache.rya.api.domain.RyaType;
 import org.apache.rya.api.domain.RyaURI;
@@ -31,11 +38,7 @@ import org.apache.rya.api.resolver.triple.TripleRow;
 import org.apache.rya.api.resolver.triple.TripleRowResolver;
 import org.apache.rya.api.resolver.triple.TripleRowResolverException;
 
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.*;
+import com.google.common.primitives.Bytes;
 
 /**
  * Will store triple in spo, po, osp. Storing everything in the whole row.
@@ -45,23 +48,23 @@ import static org.apache.rya.api.RdfCloudTripleStoreConstants.*;
 public class WholeRowTripleResolver implements TripleRowResolver {
 
     @Override
-    public Map<TABLE_LAYOUT, TripleRow> serialize(RyaStatement stmt) throws TripleRowResolverException {
+    public Map<TABLE_LAYOUT, TripleRow> serialize(final RyaStatement stmt) throws TripleRowResolverException {
         try {
-            RyaURI subject = stmt.getSubject();
-            RyaURI predicate = stmt.getPredicate();
-            RyaType object = stmt.getObject();
-            RyaURI context = stmt.getContext();
-            Long timestamp = stmt.getTimestamp();
-            byte[] columnVisibility = stmt.getColumnVisibility();
-            String qualifer = stmt.getQualifer();
-            byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes();
-            byte[] value = stmt.getValue();
+            final RyaURI subject = stmt.getSubject();
+            final RyaURI predicate = stmt.getPredicate();
+            final RyaType object = stmt.getObject();
+            final RyaURI context = stmt.getContext();
+            final Long timestamp = stmt.getTimestamp();
+            final byte[] columnVisibility = stmt.getColumnVisibility();
+            final String qualifer = stmt.getQualifer();
+            final byte[] qualBytes = qualifer == null ? EMPTY_BYTES : qualifer.getBytes(StandardCharsets.UTF_8);
+            final byte[] value = stmt.getValue();
             assert subject != null && predicate != null && object != null;
-            byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes();
-            Map<TABLE_LAYOUT, TripleRow> tripleRowMap = new HashMap<TABLE_LAYOUT, TripleRow>();
-            byte[] subjBytes = subject.getData().getBytes();
-            byte[] predBytes = predicate.getData().getBytes();
-            byte[][] objBytes = RyaContext.getInstance().serializeType(object);
+            final byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes(StandardCharsets.UTF_8);
+            final Map<TABLE_LAYOUT, TripleRow> tripleRowMap = new HashMap<TABLE_LAYOUT, TripleRow>();
+            final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+            final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
+            final byte[][] objBytes = RyaContext.getInstance().serializeType(object);
             tripleRowMap.put(TABLE_LAYOUT.SPO,
                     new TripleRow(Bytes.concat(subjBytes, DELIM_BYTES,
                             predBytes, DELIM_BYTES,
@@ -78,59 +81,59 @@ public class WholeRowTripleResolver implements TripleRowResolver {
                             predBytes, objBytes[1]), cf, qualBytes,
                             timestamp, columnVisibility, value));
             return tripleRowMap;
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new TripleRowResolverException(e);
         }
     }
 
     @Override
-    public RyaStatement deserialize(TABLE_LAYOUT table_layout, TripleRow tripleRow) throws TripleRowResolverException {
+    public RyaStatement deserialize(final TABLE_LAYOUT table_layout, final TripleRow tripleRow) throws TripleRowResolverException {
         try {
             assert tripleRow != null && table_layout != null;
-            byte[] row = tripleRow.getRow();
-            int firstIndex = Bytes.indexOf(row, DELIM_BYTE);
-            int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE);
-            int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE);
-            byte[] first = Arrays.copyOf(row, firstIndex);
-            byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex);
-            byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex);
-            byte[] type = Arrays.copyOfRange(row, typeIndex, row.length);
-            byte[] columnFamily = tripleRow.getColumnFamily();
-            boolean contextExists = columnFamily != null && columnFamily.length > 0;
-            RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily))) : null;
-            byte[] columnQualifier = tripleRow.getColumnQualifier();
-            String qualifier = columnQualifier != null && columnQualifier.length > 0 ? new String(columnQualifier) : null;
-            Long timestamp = tripleRow.getTimestamp();
-            byte[] columnVisibility = tripleRow.getColumnVisibility();
-            byte[] value = tripleRow.getValue();
+            final byte[] row = tripleRow.getRow();
+            final int firstIndex = Bytes.indexOf(row, DELIM_BYTE);
+            final int secondIndex = Bytes.lastIndexOf(row, DELIM_BYTE);
+            final int typeIndex = Bytes.indexOf(row, TYPE_DELIM_BYTE);
+            final byte[] first = Arrays.copyOf(row, firstIndex);
+            final byte[] second = Arrays.copyOfRange(row, firstIndex + 1, secondIndex);
+            final byte[] third = Arrays.copyOfRange(row, secondIndex + 1, typeIndex);
+            final byte[] type = Arrays.copyOfRange(row, typeIndex, row.length);
+            final byte[] columnFamily = tripleRow.getColumnFamily();
+            final boolean contextExists = columnFamily != null && columnFamily.length > 0;
+            final RyaURI context = (contextExists) ? (new RyaURI(new String(columnFamily, StandardCharsets.UTF_8))) : null;
+            final byte[] columnQualifier = tripleRow.getColumnQualifier();
+            final String qualifier = columnQualifier != null && columnQualifier.length > 0 ? new String(columnQualifier, StandardCharsets.UTF_8) : null;
+            final Long timestamp = tripleRow.getTimestamp();
+            final byte[] columnVisibility = tripleRow.getColumnVisibility();
+            final byte[] value = tripleRow.getValue();
 
             switch (table_layout) {
                 case SPO: {
-                    byte[] obj = Bytes.concat(third, type);
+                    final byte[] obj = Bytes.concat(third, type);
                     return new RyaStatement(
-                            new RyaURI(new String(first)),
-                            new RyaURI(new String(second)),
+                            new RyaURI(new String(first, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(second, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
                 case PO: {
-                    byte[] obj = Bytes.concat(second, type);
+                    final byte[] obj = Bytes.concat(second, type);
                     return new RyaStatement(
-                            new RyaURI(new String(third)),
-                            new RyaURI(new String(first)),
+                            new RyaURI(new String(third, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(first, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
                 case OSP: {
-                    byte[] obj = Bytes.concat(first, type);
+                    final byte[] obj = Bytes.concat(first, type);
                     return new RyaStatement(
-                            new RyaURI(new String(second)),
-                            new RyaURI(new String(third)),
+                            new RyaURI(new String(second, StandardCharsets.UTF_8)),
+                            new RyaURI(new String(third, StandardCharsets.UTF_8)),
                             RyaContext.getInstance().deserialize(obj),
                             context, qualifier, columnVisibility, value, timestamp);
                 }
             }
-        } catch (RyaTypeResolverException e) {
+        } catch (final RyaTypeResolverException e) {
             throw new TripleRowResolverException(e);
         }
         throw new TripleRowResolverException("TripleRow[" + tripleRow + "] with Table layout[" + table_layout + "] is not deserializable");

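On the read side, deserialize() above locates the first and last delimiter in the row and decodes each slice with an explicit charset. A sketch of that split under the same stand-in assumptions as the serializer sketch earlier (TYPE_DELIM handling omitted):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    import com.google.common.primitives.Bytes;

    public class SpoRowSplitSketch {
        public static void main(String[] args) {
            byte delim = 0x00;                            // stand-in for DELIM_BYTE
            byte[] row = Bytes.concat(
                    "urn:alice".getBytes(StandardCharsets.UTF_8), new byte[] { delim },
                    "urn:knows".getBytes(StandardCharsets.UTF_8), new byte[] { delim },
                    "urn:bob".getBytes(StandardCharsets.UTF_8));
            int first = Bytes.indexOf(row, delim);
            int last = Bytes.lastIndexOf(row, delim);
            // Decode with the same charset used by serialize(), so the
            // round trip is byte-for-byte stable across JVMs.
            String s = new String(Arrays.copyOf(row, first), StandardCharsets.UTF_8);
            String p = new String(Arrays.copyOfRange(row, first + 1, last), StandardCharsets.UTF_8);
            String o = new String(Arrays.copyOfRange(row, last + 1, row.length), StandardCharsets.UTF_8);
            System.out.println(s + " " + p + " " + o);
        }
    }
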
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloNamespaceTableIterator.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloNamespaceTableIterator.java b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloNamespaceTableIterator.java
index 0f1fd3d..9f6c1dd 100644
--- a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloNamespaceTableIterator.java
+++ b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloNamespaceTableIterator.java
@@ -1,5 +1,3 @@
-package org.apache.rya.accumulo;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.accumulo;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,28 +16,30 @@ package org.apache.rya.accumulo;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.accumulo;
 
+import java.io.IOError;
+import java.nio.charset.StandardCharsets;
+import java.util.Iterator;
+import java.util.Map.Entry;
 
-
-import com.google.common.base.Preconditions;
-import info.aduna.iteration.CloseableIteration;
-import org.apache.rya.api.persist.RdfDAOException;
 import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Value;
+import org.apache.rya.api.persist.RdfDAOException;
 import org.openrdf.model.Namespace;
 import org.openrdf.model.impl.NamespaceImpl;
 
-import java.io.IOError;
-import java.util.Iterator;
-import java.util.Map.Entry;
+import com.google.common.base.Preconditions;
+
+import info.aduna.iteration.CloseableIteration;
 
 public class AccumuloNamespaceTableIterator<T extends Namespace> implements
         CloseableIteration<Namespace, RdfDAOException> {
 
     private boolean open = false;
-    private Iterator<Entry<Key, Value>> result;
+    private final Iterator<Entry<Key, Value>> result;
 
-    public AccumuloNamespaceTableIterator(Iterator<Entry<Key, Value>> result) throws RdfDAOException {
+    public AccumuloNamespaceTableIterator(final Iterator<Entry<Key, Value>> result) throws RdfDAOException {
         Preconditions.checkNotNull(result);
         open = true;
         this.result = result;
@@ -50,7 +50,7 @@ public class AccumuloNamespaceTableIterator<T extends Namespace> implements
         try {
             verifyIsOpen();
             open = false;
-        } catch (IOError e) {
+        } catch (final IOError e) {
             throw new RdfDAOException(e);
         }
     }
@@ -75,15 +75,15 @@ public class AccumuloNamespaceTableIterator<T extends Namespace> implements
         return null;
     }
 
-    public static Namespace getNamespace(Iterator<Entry<Key, Value>> rowResults) {
+    public static Namespace getNamespace(final Iterator<Entry<Key, Value>> rowResults) {
         for (; rowResults.hasNext(); ) {
-            Entry<Key, Value> next = rowResults.next();
-            Key key = next.getKey();
-            Value val = next.getValue();
-            String cf = key.getColumnFamily().toString();
-            String cq = key.getColumnQualifier().toString();
-            return new NamespaceImpl(key.getRow().toString(), new String(
-                    val.get()));
+            final Entry<Key, Value> next = rowResults.next();
+            final Key key = next.getKey();
+            final Value val = next.getValue();
+            final String cf = key.getColumnFamily().toString();
+            final String cq = key.getColumnQualifier().toString();
+            return new NamespaceImpl(key.getRow().toString(),
+                    new String(val.get(), StandardCharsets.UTF_8));
         }
         return null;
     }

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRdfEvalStatsDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRdfEvalStatsDAO.java b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRdfEvalStatsDAO.java
index 10584c5..f47b4b3 100644
--- a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRdfEvalStatsDAO.java
+++ b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRdfEvalStatsDAO.java
@@ -1,5 +1,3 @@
-package org.apache.rya.accumulo;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.accumulo;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,29 +16,24 @@ package org.apache.rya.accumulo;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.accumulo;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.PRED_CF_TXT;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.SUBJECT_CF_TXT;
-import static org.apache.rya.api.RdfCloudTripleStoreConstants.SUBJECTPRED_CF_TXT;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.PREDOBJECT_CF_TXT;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.PRED_CF_TXT;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.SUBJECTOBJECT_CF_TXT;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.SUBJECTPRED_CF_TXT;
+import static org.apache.rya.api.RdfCloudTripleStoreConstants.SUBJECT_CF_TXT;
 
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.rya.api.RdfCloudTripleStoreStatement;
-import org.apache.rya.api.layout.TableLayoutStrategy;
-import org.apache.rya.api.persist.RdfDAOException;
-import org.apache.rya.api.persist.RdfEvalStatsDAO;
-
 import org.apache.accumulo.core.client.Connector;
 import org.apache.accumulo.core.client.Scanner;
 import org.apache.accumulo.core.client.admin.TableOperations;
@@ -48,11 +41,15 @@ import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.hadoop.io.Text;
+import org.apache.rya.api.RdfCloudTripleStoreStatement;
+import org.apache.rya.api.layout.TableLayoutStrategy;
+import org.apache.rya.api.persist.RdfDAOException;
+import org.apache.rya.api.persist.RdfEvalStatsDAO;
 import org.openrdf.model.Resource;
 import org.openrdf.model.Value;
 
 /**
- * Class CloudbaseRdfEvalStatsDAO
+ * Class AccumuloRdfEvalStatsDAO
  * Date: Feb 28, 2012
  * Time: 5:03:16 PM
  */
@@ -61,7 +58,7 @@ public class AccumuloRdfEvalStatsDAO implements RdfEvalStatsDAO<AccumuloRdfConfi
     private boolean initialized = false;
     private AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
 
-    private Collection<RdfCloudTripleStoreStatement> statements = new ArrayList<RdfCloudTripleStoreStatement>();
+    private final Collection<RdfCloudTripleStoreStatement> statements = new ArrayList<RdfCloudTripleStoreStatement>();
     private Connector connector;
 
     //    private String evalTable = TBL_EVAL;
@@ -78,18 +75,18 @@ public class AccumuloRdfEvalStatsDAO implements RdfEvalStatsDAO<AccumuloRdfConfi
 //            evalTable = conf.get(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable);
 //            conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_EVAL, evalTable);
 
-            TableOperations tos = connector.tableOperations();
+            final TableOperations tos = connector.tableOperations();
             AccumuloRdfUtils.createTableIfNotExist(tos, tableLayoutStrategy.getEval());
 //            boolean tableExists = tos.exists(evalTable);
 //            if (!tableExists)
 //                tos.create(evalTable);
             initialized = true;
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new RdfDAOException(e);
         }
     }
 
- 
+
     @Override
     public void destroy() throws RdfDAOException {
         if (!isInitialized()) {
@@ -107,25 +104,27 @@ public class AccumuloRdfEvalStatsDAO implements RdfEvalStatsDAO<AccumuloRdfConfi
         return connector;
     }
 
-    public void setConnector(Connector connector) {
+    public void setConnector(final Connector connector) {
         this.connector = connector;
     }
 
+    @Override
     public AccumuloRdfConfiguration getConf() {
         return conf;
     }
 
-    public void setConf(AccumuloRdfConfiguration conf) {
+    @Override
+    public void setConf(final AccumuloRdfConfiguration conf) {
         this.conf = conf;
     }
 
-	@Override
-	public double getCardinality(AccumuloRdfConfiguration conf,
-			org.apache.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card,
-			List<Value> val, Resource context) throws RdfDAOException {
+    @Override
+    public double getCardinality(final AccumuloRdfConfiguration conf,
+            final org.apache.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card, final List<Value> val,
+            final Resource context) throws RdfDAOException {
         try {
-            Authorizations authorizations = conf.getAuthorizations();
-            Scanner scanner = connector.createScanner(tableLayoutStrategy.getEval(), authorizations);
+            final Authorizations authorizations = conf.getAuthorizations();
+            final Scanner scanner = connector.createScanner(tableLayoutStrategy.getEval(), authorizations);
             Text cfTxt = null;
             if (CARDINALITY_OF.SUBJECT.equals(card)) {
                 cfTxt = SUBJECT_CF_TXT;
@@ -140,34 +139,36 @@ public class AccumuloRdfEvalStatsDAO implements RdfEvalStatsDAO<AccumuloRdfConfi
                 cfTxt = SUBJECTPRED_CF_TXT;
             } else if (CARDINALITY_OF.PREDICATEOBJECT.equals(card)) {
                 cfTxt = PREDOBJECT_CF_TXT;
-            } else throw new IllegalArgumentException("Not right Cardinality[" + card + "]");
+            } else {
+                throw new IllegalArgumentException("Not right Cardinality[" + card + "]");
+            }
             Text cq = EMPTY_TEXT;
             if (context != null) {
-                cq = new Text(context.stringValue().getBytes());
+                cq = new Text(context.stringValue().getBytes(StandardCharsets.UTF_8));
             }
             scanner.fetchColumn(cfTxt, cq);
-            Iterator<Value> vals = val.iterator();
+            final Iterator<Value> vals = val.iterator();
             String compositeIndex = vals.next().stringValue();
             while (vals.hasNext()){
             	compositeIndex += DELIM + vals.next().stringValue();
             }
-            scanner.setRange(new Range(new Text(compositeIndex.getBytes())));
-            Iterator<Map.Entry<Key, org.apache.accumulo.core.data.Value>> iter = scanner.iterator();
+            scanner.setRange(new Range(new Text(compositeIndex.getBytes(StandardCharsets.UTF_8))));
+            final Iterator<Map.Entry<Key, org.apache.accumulo.core.data.Value>> iter = scanner.iterator();
             if (iter.hasNext()) {
-                return Double.parseDouble(new String(iter.next().getValue().get()));
+                return Double.parseDouble(new String(iter.next().getValue().get(), StandardCharsets.UTF_8));
             }
-        } catch (Exception e) {
+        } catch (final Exception e) {
             throw new RdfDAOException(e);
         }
 
         //default
         return -1;
-	}
-
-	@Override
-	public double getCardinality(AccumuloRdfConfiguration conf,
-			org.apache.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card,
-			List<Value> val) throws RdfDAOException {
-		return getCardinality(conf, card, val, null);
-	}
+    }
+
+    @Override
+    public double getCardinality(final AccumuloRdfConfiguration conf,
+            final org.apache.rya.api.persist.RdfEvalStatsDAO.CARDINALITY_OF card, final List<Value> val)
+            throws RdfDAOException {
+        return getCardinality(conf, card, val, null);
+    }
 }

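The eval-table lookup above joins the lookup values into a composite key and stores each cardinality as the UTF-8 text of a double, so both sides of the round trip must pin the charset. A sketch with an illustrative delimiter and value (assumes accumulo-core on the classpath; the key and number are not real eval-table contents):

    import java.nio.charset.StandardCharsets;

    import org.apache.accumulo.core.data.Value;

    public class CardinalityDecodeSketch {
        public static void main(String[] args) {
            // DELIM stand-in joining the composite lookup key
            String compositeIndex = "urn:knows" + "\u0000" + "urn:bob";
            Value stored = new Value("42.0".getBytes(StandardCharsets.UTF_8));
            // Decode with an explicit charset before parsing the double.
            double cardinality = Double.parseDouble(
                    new String(stored.get(), StandardCharsets.UTF_8));
            System.out.println(compositeIndex + " -> " + cardinality);
        }
    }
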
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRyaDAO.java
----------------------------------------------------------------------
diff --git a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRyaDAO.java b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRyaDAO.java
index f1f7c03..8c99e44 100644
--- a/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRyaDAO.java
+++ b/dao/accumulo.rya/src/main/java/org/apache/rya/accumulo/AccumuloRyaDAO.java
@@ -1,5 +1,3 @@
-package org.apache.rya.accumulo;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,8 +16,7 @@ package org.apache.rya.accumulo;
  * specific language governing permissions and limitations
  * under the License.
  */
-
-
+package org.apache.rya.accumulo;
 
 import static com.google.common.base.Preconditions.checkNotNull;
 import static org.apache.rya.accumulo.AccumuloRdfConstants.ALL_AUTHORIZATIONS;
@@ -33,6 +30,7 @@ import static org.apache.rya.api.RdfCloudTripleStoreConstants.RTS_VERSION_PREDIC
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.VERSION_RYA;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Iterator;
@@ -319,7 +317,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
     public void addNamespace(final String pfx, final String namespace) throws RyaDAOException {
         try {
             final Mutation m = new Mutation(new Text(pfx));
-            m.put(INFO_NAMESPACE_TXT, EMPTY_TEXT, new Value(namespace.getBytes()));
+            m.put(INFO_NAMESPACE_TXT, EMPTY_TEXT, new Value(namespace.getBytes(StandardCharsets.UTF_8)));
             bw_ns.addMutation(m);
             if (flushEachUpdate) { mt_bw.flush(); }
         } catch (final Exception e) {
@@ -338,7 +336,7 @@ public class AccumuloRyaDAO implements RyaDAO<AccumuloRdfConfiguration>, RyaName
                     .iterator();
 
             if (iterator.hasNext()) {
-                return new String(iterator.next().getValue().get());
+                return new String(iterator.next().getValue().get(), StandardCharsets.UTF_8);
             }
         } catch (final Exception e) {
             throw new RyaDAOException(e);
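
For context on why RYA-401 touches so many call sites: new String(bytes) and String.getBytes() silently use the JVM's default charset, so the same namespace can serialize to different bytes on machines with different file.encoding settings. A small, self-contained illustration of the pitfall and the fix (not Rya code):

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public class DefaultCharsetPitfall {
        public static void main(final String[] args) {
            final String namespace = "http://example.org/ontología#"; // non-ASCII on purpose

            // Buggy: length and content depend on this JVM's default charset.
            final byte[] platformBytes = namespace.getBytes();

            // Fixed: identical bytes on every platform.
            final byte[] utf8Bytes = namespace.getBytes(StandardCharsets.UTF_8);

            System.out.println("default charset: " + Charset.defaultCharset());
            System.out.println("platform bytes:  " + platformBytes.length);
            System.out.println("utf-8 bytes:     " + utf8Bytes.length);

            // Decoding must name the same charset that produced the bytes.
            System.out.println(new String(utf8Bytes, StandardCharsets.UTF_8));
        }
    }

On a 2017-era JVM, running this with different -Dfile.encoding values changes the platform byte count while the UTF-8 count stays fixed.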

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
index d2d6d7e..6956f49 100644
--- a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
+++ b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBNamespaceManager.java
@@ -1,5 +1,3 @@
-package org.apache.rya.mongodb.dao;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -8,9 +6,9 @@ package org.apache.rya.mongodb.dao;
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- * 
+ *
  *   http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
@@ -18,18 +16,16 @@ package org.apache.rya.mongodb.dao;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.mongodb.dao;
 
-
-import info.aduna.iteration.CloseableIteration;
-
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Map;
 
+import org.apache.commons.codec.binary.Hex;
 import org.apache.rya.api.persist.RyaDAOException;
 import org.apache.rya.mongodb.MongoDBRdfConfiguration;
-
-import org.apache.commons.codec.binary.Hex;
 import org.openrdf.model.Namespace;
 
 import com.mongodb.BasicDBObject;
@@ -37,22 +33,28 @@ import com.mongodb.DBCollection;
 import com.mongodb.DBCursor;
 import com.mongodb.DBObject;
 
+import info.aduna.iteration.CloseableIteration;
+
 public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager {
 
 	public class NamespaceImplementation implements Namespace {
 
-		private String namespace;
-		private String prefix;
+		private final String namespace;
+		private final String prefix;
 
-		public NamespaceImplementation(String namespace, String prefix) {
+		public NamespaceImplementation(final String namespace, final String prefix) {
 			this.namespace = namespace;
 			this.prefix = prefix;
 		}
 
 		@Override
-		public int compareTo(Namespace o) {
-			if (!namespace.equalsIgnoreCase(o.getName())) return namespace.compareTo(o.getName());
-			if (!prefix.equalsIgnoreCase(o.getPrefix())) return prefix.compareTo(o.getPrefix());
+		public int compareTo(final Namespace o) {
+			if (!namespace.equalsIgnoreCase(o.getName())) {
+                return namespace.compareTo(o.getName());
+            }
+			if (!prefix.equalsIgnoreCase(o.getPrefix())) {
+                return prefix.compareTo(o.getPrefix());
+            }
 			return 0;
 		}
 
@@ -70,9 +72,9 @@ public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager {
 
 	public class MongoCursorIteration implements
 			CloseableIteration<Namespace, RyaDAOException> {
-		private DBCursor cursor;
+		private final DBCursor cursor;
 
-		public MongoCursorIteration(DBCursor cursor2) {
+		public MongoCursorIteration(final DBCursor cursor2) {
 			this.cursor = cursor2;
 		}
 
@@ -83,12 +85,12 @@ public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager {
 
 		@Override
 		public Namespace next() throws RyaDAOException {
-			DBObject ns = cursor.next();
-			Map values = ns.toMap();
-			String namespace = (String) values.get(NAMESPACE);
-			String prefix = (String) values.get(PREFIX);
-			
-			Namespace temp =  new NamespaceImplementation(namespace, prefix);
+			final DBObject ns = cursor.next();
+			final Map values = ns.toMap();
+			final String namespace = (String) values.get(NAMESPACE);
+			final String prefix = (String) values.get(PREFIX);
+
+			final Namespace temp =  new NamespaceImplementation(namespace, prefix);
 			return temp;
 		}
 
@@ -108,22 +110,22 @@ public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager {
 	private static final String PREFIX = "prefix";
 	private static final String NAMESPACE = "namespace";
 	private MongoDBRdfConfiguration conf;
-	private DBCollection nsColl;
+	private final DBCollection nsColl;
 
 
-	public SimpleMongoDBNamespaceManager(DBCollection nameSpaceCollection) {
+	public SimpleMongoDBNamespaceManager(final DBCollection nameSpaceCollection) {
 		nsColl = nameSpaceCollection;
 	}
-	
+
 	@Override
-	public void createIndices(DBCollection coll){
+	public void createIndices(final DBCollection coll){
 		coll.createIndex(PREFIX);
 		coll.createIndex(NAMESPACE);
 	}
 
 
 	@Override
-	public void setConf(MongoDBRdfConfiguration paramC) {
+	public void setConf(final MongoDBRdfConfiguration paramC) {
 		this.conf = paramC;
 	}
 
@@ -134,47 +136,47 @@ public class SimpleMongoDBNamespaceManager implements MongoDBNamespaceManager {
 	}
 
 	@Override
-	public void addNamespace(String prefix, String namespace)
+	public void addNamespace(final String prefix, final String namespace)
 			throws RyaDAOException {
-		String id = prefix;
-		byte[] bytes = id.getBytes();
+		final String id = prefix;
+		byte[] bytes = id.getBytes(StandardCharsets.UTF_8);
 		try {
-			MessageDigest digest = MessageDigest.getInstance("SHA-1");
+			final MessageDigest digest = MessageDigest.getInstance("SHA-1");
 			bytes = digest.digest(bytes);
-		} catch (NoSuchAlgorithmException e) {
+		} catch (final NoSuchAlgorithmException e) {
 			// TODO Auto-generated catch block
 			e.printStackTrace();
 		}
-		BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes)))
+		final BasicDBObject doc = new BasicDBObject(ID, new String(Hex.encodeHex(bytes)))
 		.append(PREFIX, prefix)
 	    .append(NAMESPACE, namespace);
 		nsColl.insert(doc);
-		
+
 	}
 
 	@Override
-	public String getNamespace(String prefix) throws RyaDAOException {
-        DBObject query = new BasicDBObject().append(PREFIX, prefix);
-        DBCursor cursor = nsColl.find(query);
+	public String getNamespace(final String prefix) throws RyaDAOException {
+        final DBObject query = new BasicDBObject().append(PREFIX, prefix);
+        final DBCursor cursor = nsColl.find(query);
         String nameSpace = prefix;
         while (cursor.hasNext()){
-          DBObject obj = cursor.next();	
+          final DBObject obj = cursor.next();
           nameSpace = (String) obj.toMap().get(NAMESPACE);
         }
         return nameSpace;
 	}
 
 	@Override
-	public void removeNamespace(String prefix) throws RyaDAOException {
-        DBObject query = new BasicDBObject().append(PREFIX, prefix);
+	public void removeNamespace(final String prefix) throws RyaDAOException {
+        final DBObject query = new BasicDBObject().append(PREFIX, prefix);
 		nsColl.remove(query);
 	}
 
 	@Override
 	public CloseableIteration<? extends Namespace, RyaDAOException> iterateNamespace()
 			throws RyaDAOException {
-        DBObject query = new BasicDBObject();
-        DBCursor cursor = nsColl.find(query);
+        final DBObject query = new BasicDBObject();
+        final DBCursor cursor = nsColl.find(query);
 		return new MongoCursorIteration(cursor);
 	}
 

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
----------------------------------------------------------------------
diff --git a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
index a8f548c..388e807 100644
--- a/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
+++ b/dao/mongodb.rya/src/main/java/org/apache/rya/mongodb/dao/SimpleMongoDBStorageStrategy.java
@@ -1,5 +1,3 @@
-package org.apache.rya.mongodb.dao;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,9 +16,11 @@ package org.apache.rya.mongodb.dao;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.mongodb.dao;
 
 import static org.openrdf.model.vocabulary.XMLSchema.ANYURI;
 
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Map;
@@ -166,7 +166,7 @@ public class SimpleMongoDBStorageStrategy implements MongoDBStorageStrategy<RyaS
         }
         final String id = statement.getSubject().getData() + " " +
                 statement.getPredicate().getData() + " " +  statement.getObject().getData() + " " + context;
-        byte[] bytes = id.getBytes();
+        byte[] bytes = id.getBytes(StandardCharsets.UTF_8);
         try {
             final MessageDigest digest = MessageDigest.getInstance("SHA-1");
             bytes = digest.digest(bytes);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java b/extras/indexing/src/main/java/org/apache/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java
index 6e5b72f..595fc36 100644
--- a/extras/indexing/src/main/java/org/apache/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java
+++ b/extras/indexing/src/main/java/org/apache/rya/accumulo/documentIndex/DocumentIndexIntersectingIterator.java
@@ -20,6 +20,7 @@
 package org.apache.rya.accumulo.documentIndex;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
@@ -33,7 +34,6 @@ import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.IteratorEnvironment;
 import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
-import org.apache.accumulo.core.tabletserver.thrift.TabletClientService;
 import org.apache.accumulo.core.util.TextUtil;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.io.Text;
@@ -560,9 +560,9 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
   protected static String encodeColumns(TextColumn[] columns) {
       StringBuilder sb = new StringBuilder();
       for (int i = 0; i < columns.length; i++) {
-        sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnFamily()))));
+        sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnFamily())), StandardCharsets.UTF_8));
         sb.append('\n');
-        sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnQualifier()))));
+        sb.append(new String(Base64.encodeBase64(TextUtil.getBytes(columns[i].getColumnQualifier())), StandardCharsets.UTF_8));
         sb.append('\u0001');
       }
       return sb.toString();
@@ -575,8 +575,8 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
       TextColumn[] columnTexts = new TextColumn[columnStrings.length];
       for (int i = 0; i < columnStrings.length; i++) {
         String[] columnComponents = columnStrings[i].split("\n");
-        columnTexts[i] = new TextColumn(new Text(Base64.decodeBase64(columnComponents[0].getBytes())), 
-                new Text(Base64.decodeBase64(columnComponents[1].getBytes())));
+        columnTexts[i] = new TextColumn(new Text(Base64.decodeBase64(columnComponents[0].getBytes(StandardCharsets.UTF_8))), 
+                new Text(Base64.decodeBase64(columnComponents[1].getBytes(StandardCharsets.UTF_8))));
       }
       return columnTexts;
     }
@@ -591,7 +591,7 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
    */
   protected static String encodeContext(String context) {
  
-    return new String(Base64.encodeBase64(context.getBytes()));
+    return new String(Base64.encodeBase64(context.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
   }
   
  
@@ -605,7 +605,7 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
         if (context == null) {
             return null;
         } else {
-            return new String(Base64.decodeBase64(context.getBytes()));
+            return new String(Base64.decodeBase64(context.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
         }
     }
   
@@ -621,7 +621,7 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
         else
           bytes[i] = 0;
       }
-      return new String(Base64.encodeBase64(bytes));
+      return new String(Base64.encodeBase64(bytes), StandardCharsets.UTF_8);
     }
     
     /**
@@ -633,7 +633,7 @@ public class DocumentIndexIntersectingIterator implements SortedKeyValueIterator
       if (prefixes == null)
         return null;
       
-      byte[] bytes = Base64.decodeBase64(prefixes.getBytes());
+      byte[] bytes = Base64.decodeBase64(prefixes.getBytes(StandardCharsets.UTF_8));
       boolean[] bFlags = new boolean[bytes.length];
       for (int i = 0; i < bytes.length; i++) {
         if (bytes[i] == 1)
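
encodeColumns and encodeContext above wrap commons-codec Base64 in explicit UTF-8 conversions on both sides. Base64 output is plain ASCII, so the old code happened to work on most platforms, but naming the charset makes both directions deterministic. The round trip in isolation:

    import java.nio.charset.StandardCharsets;

    import org.apache.commons.codec.binary.Base64;

    public class ContextCodecSketch {
        static String encodeContext(final String context) {
            final byte[] encoded = Base64.encodeBase64(context.getBytes(StandardCharsets.UTF_8));
            return new String(encoded, StandardCharsets.UTF_8);
        }

        static String decodeContext(final String encoded) {
            final byte[] decoded = Base64.decodeBase64(encoded.getBytes(StandardCharsets.UTF_8));
            return new String(decoded, StandardCharsets.UTF_8);
        }

        public static void main(final String[] args) {
            final String roundTripped = decodeContext(encodeContext("http://example.org/graph1"));
            System.out.println(roundTripped);
        }
    }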

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/KeyParts.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/KeyParts.java b/extras/indexing/src/main/java/org/apache/rya/indexing/KeyParts.java
index 1e988fe..11ff8c0 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/KeyParts.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/KeyParts.java
@@ -1,5 +1,3 @@
-package org.apache.rya.indexing;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,8 +16,9 @@ package org.apache.rya.indexing;
  * specific language governing permissions and limitations
  * under the License.
  */
+package org.apache.rya.indexing;
 
-
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
@@ -202,7 +201,7 @@ public class KeyParts implements Iterable<KeyParts> {
 			return "KeyParts [contraintPrefix=" + toHumanString(constraintPrefix) + ", instant=" + toHumanString(instant.getAsKeyBytes()) + ", cf=" + cf + ", cq=" + cq + "]";
 		}
 	    private static void appendSubject(final Statement statement, final Text keyText) {
-	        final Value statementValue = new Value(StatementSerializer.writeSubject(statement).getBytes());
+	        final Value statementValue = new Value(StatementSerializer.writeSubject(statement).getBytes(StandardCharsets.UTF_8));
 	        final byte[] hashOfValue = uniqueFromValueForKey(statementValue);
 	        appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte.
 	        appendBytes(hashOfValue, keyText);

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
index a84670f..25a272d 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/AccumuloDocIdIndexer.java
@@ -1,5 +1,3 @@
-package org.apache.rya.indexing.accumulo.entity;
-
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
@@ -18,13 +16,13 @@ package org.apache.rya.indexing.accumulo.entity;
  * specific language governing permissions and limitations
  * under the License.
  */
-
+package org.apache.rya.indexing.accumulo.entity;
 
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.DELIM_BYTE;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTE;
-import info.aduna.iteration.CloseableIteration;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -35,17 +33,6 @@ import java.util.Map.Entry;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
-import org.apache.rya.accumulo.AccumuloRdfConfiguration;
-import org.apache.rya.accumulo.documentIndex.DocIndexIteratorUtil;
-import org.apache.rya.accumulo.documentIndex.DocumentIndexIntersectingIterator;
-import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
-import org.apache.rya.api.domain.RyaURI;
-import org.apache.rya.api.resolver.RyaContext;
-import org.apache.rya.api.resolver.RyaToRdfConversions;
-import org.apache.rya.api.resolver.RyaTypeResolverException;
-import org.apache.rya.indexing.DocIdIndexer;
-import org.apache.rya.indexing.accumulo.ConfigUtils;
-
 import org.apache.accumulo.core.client.AccumuloException;
 import org.apache.accumulo.core.client.AccumuloSecurityException;
 import org.apache.accumulo.core.client.BatchScanner;
@@ -57,6 +44,16 @@ import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.hadoop.io.Text;
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.accumulo.documentIndex.DocIndexIteratorUtil;
+import org.apache.rya.accumulo.documentIndex.DocumentIndexIntersectingIterator;
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.domain.RyaURI;
+import org.apache.rya.api.resolver.RyaContext;
+import org.apache.rya.api.resolver.RyaToRdfConversions;
+import org.apache.rya.api.resolver.RyaTypeResolverException;
+import org.apache.rya.indexing.DocIdIndexer;
+import org.apache.rya.indexing.accumulo.ConfigUtils;
 import org.openrdf.query.BindingSet;
 import org.openrdf.query.MalformedQueryException;
 import org.openrdf.query.QueryEvaluationException;
@@ -72,14 +69,16 @@ import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Bytes;
 
+import info.aduna.iteration.CloseableIteration;
+
 public class AccumuloDocIdIndexer implements DocIdIndexer {
 
 
 
     private BatchScanner bs;
-    private AccumuloRdfConfiguration conf;
+    private final AccumuloRdfConfiguration conf;
 
-    public AccumuloDocIdIndexer(RdfCloudTripleStoreConfiguration conf) throws AccumuloException, AccumuloSecurityException {
+    public AccumuloDocIdIndexer(final RdfCloudTripleStoreConfiguration conf) throws AccumuloException, AccumuloSecurityException {
         Preconditions.checkArgument(conf instanceof RdfCloudTripleStoreConfiguration, "conf must be instance of RdfCloudTripleStoreConfiguration");
         this.conf = (AccumuloRdfConfiguration) conf;
         //Connector conn = ConfigUtils.getConnector(conf);
@@ -88,22 +87,22 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
 
 
 
-    public CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(String sparqlQuery,
-            Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException {
+    public CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(final String sparqlQuery,
+            final Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException {
 
-        SPARQLParser parser = new SPARQLParser();
+        final SPARQLParser parser = new SPARQLParser();
         ParsedQuery pq1 = null;
         try {
             pq1 = parser.parseQuery(sparqlQuery, null);
-        } catch (MalformedQueryException e) {
+        } catch (final MalformedQueryException e) {
             e.printStackTrace();
         }
 
-        TupleExpr te1 = pq1.getTupleExpr();
-        List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
+        final TupleExpr te1 = pq1.getTupleExpr();
+        final List<StatementPattern> spList1 = StatementPatternCollector.process(te1);
 
         if(StarQuery.isValidStarQuery(spList1)) {
-            StarQuery sq1 = new StarQuery(spList1);
+            final StarQuery sq1 = new StarQuery(spList1);
             return queryDocIndex(sq1, constraints);
         } else {
             throw new IllegalArgumentException("Invalid star query!");
@@ -115,8 +114,8 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
 
 
     @Override
-    public CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(StarQuery query,
-            Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException {
+    public CloseableIteration<BindingSet, QueryEvaluationException> queryDocIndex(final StarQuery query,
+            final Collection<BindingSet> constraints) throws TableNotFoundException, QueryEvaluationException {
 
         final StarQuery starQ = query;
         final Iterator<BindingSet> bs = constraints.iterator();
@@ -124,7 +123,7 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
         final Set<String> unCommonVarNames;
         final Set<String> commonVarNames;
         if (bs2.hasNext()) {
-            BindingSet currBs = bs2.next();
+            final BindingSet currBs = bs2.next();
             commonVarNames = StarQuery.getCommonVars(query, currBs);
             unCommonVarNames = Sets.difference(currBs.getBindingNames(), commonVarNames);
         } else {
@@ -138,17 +137,17 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
             final String commonVar = starQ.getCommonVarName();
             final Iterator<Entry<Key, Value>> intersections;
             final BatchScanner scan;
-            Set<Range> ranges = Sets.newHashSet();
+            final Set<Range> ranges = Sets.newHashSet();
 
             while(bs.hasNext()) {
 
-                BindingSet currentBs = bs.next();
+                final BindingSet currentBs = bs.next();
 
                 if(currentBs.getBinding(commonVar) == null) {
                     continue;
                 }
 
-                String row = currentBs.getBinding(commonVar).getValue().stringValue();
+                final String row = currentBs.getBinding(commonVar).getValue().stringValue();
                 ranges.add(new Range(row));
                 map.put(row, currentBs);
 
@@ -246,7 +245,7 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
                 private boolean init = false;
                 private BindingSet currentBs;
                 private StarQuery sq = new StarQuery(starQ);
-                private Set<Range> emptyRangeSet = Sets.newHashSet();
+                private final Set<Range> emptyRangeSet = Sets.newHashSet();
                 private BatchScanner scan;
 
                 @Override
@@ -327,16 +326,16 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
         }
     }
 
-    private QueryBindingSet deserializeKey(Key key, StarQuery sq, BindingSet currentBs, Set<String> unCommonVar) {
+    private QueryBindingSet deserializeKey(final Key key, final StarQuery sq, final BindingSet currentBs, final Set<String> unCommonVar) {
 
 
-        QueryBindingSet currentSolutionBs = new QueryBindingSet();
+        final QueryBindingSet currentSolutionBs = new QueryBindingSet();
 
-        Text row = key.getRow();
-        Text cq = key.getColumnQualifier();
+        final Text row = key.getRow();
+        final Text cq = key.getColumnQualifier();
 
 
-        String[] cqArray = cq.toString().split(DocIndexIteratorUtil.DOC_ID_INDEX_DELIM);
+        final String[] cqArray = cq.toString().split(DocIndexIteratorUtil.DOC_ID_INDEX_DELIM);
 
         boolean commonVarSet = false;
 
@@ -346,63 +345,63 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
         }
 
         if (!commonVarSet && sq.isCommonVarURI()) {
-            RyaURI rURI = new RyaURI(row.toString());
+            final RyaURI rURI = new RyaURI(row.toString());
             currentSolutionBs.addBinding(sq.getCommonVarName(),
                     RyaToRdfConversions.convertValue(rURI));
             commonVarSet = true;
         }
 
-        for (String s : sq.getUnCommonVars()) {
+        for (final String s : sq.getUnCommonVars()) {
 
-            byte[] cqBytes = cqArray[sq.getVarPos().get(s)].getBytes();
-            int firstIndex = Bytes.indexOf(cqBytes, DELIM_BYTE);
-            int secondIndex = Bytes.lastIndexOf(cqBytes, DELIM_BYTE);
-            int typeIndex = Bytes.indexOf(cqBytes, TYPE_DELIM_BYTE);
-            byte[] tripleComponent = Arrays.copyOfRange(cqBytes, firstIndex + 1, secondIndex);
-            byte[] cqContent = Arrays.copyOfRange(cqBytes, secondIndex + 1, typeIndex);
-            byte[] objType = Arrays.copyOfRange(cqBytes, typeIndex, cqBytes.length);
+            final byte[] cqBytes = cqArray[sq.getVarPos().get(s)].getBytes(StandardCharsets.UTF_8);
+            final int firstIndex = Bytes.indexOf(cqBytes, DELIM_BYTE);
+            final int secondIndex = Bytes.lastIndexOf(cqBytes, DELIM_BYTE);
+            final int typeIndex = Bytes.indexOf(cqBytes, TYPE_DELIM_BYTE);
+            final String tripleComponent = new String(Arrays.copyOfRange(cqBytes, firstIndex + 1, secondIndex), StandardCharsets.UTF_8);
+            final byte[] cqContent = Arrays.copyOfRange(cqBytes, secondIndex + 1, typeIndex);
+            final byte[] objType = Arrays.copyOfRange(cqBytes, typeIndex, cqBytes.length);
 
-            if (new String(tripleComponent).equals("object")) {
-                byte[] object = Bytes.concat(cqContent, objType);
+            if (tripleComponent.equals("object")) {
+                final byte[] object = Bytes.concat(cqContent, objType);
                 org.openrdf.model.Value v = null;
                 try {
                     v = RyaToRdfConversions.convertValue(RyaContext.getInstance().deserialize(
                             object));
-                } catch (RyaTypeResolverException e) {
+                } catch (final RyaTypeResolverException e) {
                     e.printStackTrace();
                 }
                 currentSolutionBs.addBinding(s, v);
 
-            } else if (new String(tripleComponent).equals("subject")) {
+            } else if (tripleComponent.equals("subject")) {
                 if (!commonVarSet) {
-                    byte[] object = Bytes.concat(row.getBytes(), objType);
+                    final byte[] object = Bytes.concat(row.getBytes(), objType);
                     org.openrdf.model.Value v = null;
                     try {
                         v = RyaToRdfConversions.convertValue(RyaContext.getInstance().deserialize(
                                 object));
-                    } catch (RyaTypeResolverException e) {
+                    } catch (final RyaTypeResolverException e) {
                         e.printStackTrace();
                     }
                     currentSolutionBs.addBinding(sq.getCommonVarName(), v);
                     commonVarSet = true;
                 }
-                RyaURI rURI = new RyaURI(new String(cqContent));
+                final RyaURI rURI = new RyaURI(new String(cqContent, StandardCharsets.UTF_8));
                 currentSolutionBs.addBinding(s, RyaToRdfConversions.convertValue(rURI));
             } else {
                 throw new IllegalArgumentException("Invalid row.");
             }
         }
-        for (String s : unCommonVar) {
+        for (final String s : unCommonVar) {
             currentSolutionBs.addBinding(s, currentBs.getValue(s));
         }
         return currentSolutionBs;
     }
 
-    private BatchScanner runQuery(StarQuery query, Collection<Range> ranges) throws QueryEvaluationException {
+    private BatchScanner runQuery(final StarQuery query, Collection<Range> ranges) throws QueryEvaluationException {
 
         try {
             if (ranges.size() == 0) {
-                String rangeText = query.getCommonVarValue();
+                final String rangeText = query.getCommonVarValue();
                 Range r;
                 if (rangeText != null) {
                     r = new Range(new Text(query.getCommonVarValue()));
@@ -412,15 +411,15 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
                 ranges = Collections.singleton(r);
             }
 
-            Connector accCon = ConfigUtils.getConnector(conf);
-            IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class);
+            final Connector accCon = ConfigUtils.getConnector(conf);
+            final IteratorSetting is = new IteratorSetting(30, "fii", DocumentIndexIntersectingIterator.class);
 
             DocumentIndexIntersectingIterator.setColumnFamilies(is, query.getColumnCond());
 
             if (query.hasContext()) {
                 DocumentIndexIntersectingIterator.setContext(is, query.getContextURI());
             }
-            
+
             final Authorizations auths;
             final String authsStr = conf.get(ConfigUtils.CLOUDBASE_AUTHS);
             if(authsStr == null || authsStr.isEmpty()) {
@@ -428,7 +427,7 @@ public class AccumuloDocIdIndexer implements DocIdIndexer {
             } else {
                 auths = new Authorizations(authsStr);
             }
-            
+
             bs = accCon.createBatchScanner(EntityCentricIndex.getTableName(conf), auths, 15);
             bs.addScanIterator(is);
             bs.setRanges(ranges);
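
deserializeKey above carves each column qualifier into a triple component, its content, and a type suffix using Guava's Bytes.indexOf/lastIndexOf plus Arrays.copyOfRange, decoding each slice as UTF-8. A simplified parser over the same shape of input, with made-up delimiter bytes standing in for Rya's DELIM_BYTE and TYPE_DELIM_BYTE:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    import com.google.common.primitives.Bytes;

    public class QualifierParseSketch {
        // Stand-ins for Rya's DELIM_BYTE and TYPE_DELIM_BYTE constants.
        private static final byte DELIM_BYTE = 0x00;
        private static final byte TYPE_DELIM_BYTE = 0x01;

        public static void main(final String[] args) {
            // Layout: <prefix> DELIM <component> DELIM <content> TYPE_DELIM <type...>
            final byte[] cq = Bytes.concat(
                    "ctx".getBytes(StandardCharsets.UTF_8), new byte[]{DELIM_BYTE},
                    "subject".getBytes(StandardCharsets.UTF_8), new byte[]{DELIM_BYTE},
                    "urn:alice".getBytes(StandardCharsets.UTF_8), new byte[]{TYPE_DELIM_BYTE, 0x02});

            final int first = Bytes.indexOf(cq, DELIM_BYTE);
            final int second = Bytes.lastIndexOf(cq, DELIM_BYTE);
            final int typeIdx = Bytes.indexOf(cq, TYPE_DELIM_BYTE);

            final String component = new String(Arrays.copyOfRange(cq, first + 1, second), StandardCharsets.UTF_8);
            final String content = new String(Arrays.copyOfRange(cq, second + 1, typeIdx), StandardCharsets.UTF_8);

            System.out.println(component + " -> " + content); // subject -> urn:alice
        }
    }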

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/538cfccc/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/EntityCentricIndex.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/EntityCentricIndex.java b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/EntityCentricIndex.java
index 0676e3d..ab4bd55 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/EntityCentricIndex.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/entity/EntityCentricIndex.java
@@ -27,6 +27,7 @@ import static org.apache.rya.api.RdfCloudTripleStoreConstants.EMPTY_TEXT;
 import static org.apache.rya.api.RdfCloudTripleStoreConstants.TYPE_DELIM_BYTES;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
@@ -68,6 +69,9 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
     private static final Logger logger = Logger.getLogger(EntityCentricIndex.class);
     private static final String TABLE_SUFFIX = "EntityCentricIndex";
 
+    private static final String OBJECT = "object";
+    private static final String SUBJECT = "subject";
+
     private AccumuloRdfConfiguration conf;
     private BatchWriter writer;
     private boolean isInit = false;
@@ -235,15 +239,15 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         final byte[] columnVisibility = stmt.getColumnVisibility();
         final byte[] value = stmt.getValue();
         assert subject != null && predicate != null && object != null;
-        final byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes();
-        final byte[] subjBytes = subject.getData().getBytes();
-        final byte[] predBytes = predicate.getData().getBytes();
+        final byte[] cf = (context == null) ? EMPTY_BYTES : context.getData().getBytes(StandardCharsets.UTF_8);
+        final byte[] subjBytes = subject.getData().getBytes(StandardCharsets.UTF_8);
+        final byte[] predBytes = predicate.getData().getBytes(StandardCharsets.UTF_8);
         final byte[][] objBytes = RyaContext.getInstance().serializeType(object);
 
         return Lists.newArrayList(new TripleRow(subjBytes,
             predBytes,
             Bytes.concat(cf, DELIM_BYTES,
-                "object".getBytes(), DELIM_BYTES,
+                OBJECT.getBytes(StandardCharsets.UTF_8), DELIM_BYTES,
                 objBytes[0], objBytes[1]),
             timestamp,
             columnVisibility,
@@ -251,7 +255,7 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
             new TripleRow(objBytes[0],
                 predBytes,
                 Bytes.concat(cf, DELIM_BYTES,
-                    "subject".getBytes(), DELIM_BYTES,
+                    SUBJECT.getBytes(StandardCharsets.UTF_8), DELIM_BYTES,
                     subjBytes, objBytes[1]),
                 timestamp,
                 columnVisibility,
@@ -282,25 +286,25 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         final byte[] columnFamily = Arrays.copyOf(data, split);
         final byte[] edgeBytes = Arrays.copyOfRange(data, split + DELIM_BYTES.length, data.length);
         split = Bytes.indexOf(edgeBytes, DELIM_BYTES);
-        String otherNodeVar = new String(Arrays.copyOf(edgeBytes, split));
-        byte[] otherNodeBytes = Arrays.copyOfRange(edgeBytes,  split + DELIM_BYTES.length, edgeBytes.length);
+        final String otherNodeVar = new String(Arrays.copyOf(edgeBytes, split), StandardCharsets.UTF_8);
+        final byte[] otherNodeBytes = Arrays.copyOfRange(edgeBytes,  split + DELIM_BYTES.length, edgeBytes.length);
         split = Bytes.indexOf(otherNodeBytes, TYPE_DELIM_BYTES);
-        byte[] otherNodeData = Arrays.copyOf(otherNodeBytes,  split);
-        byte[] typeBytes = Arrays.copyOfRange(otherNodeBytes,  split, otherNodeBytes.length);
+        final byte[] otherNodeData = Arrays.copyOf(otherNodeBytes,  split);
+        final byte[] typeBytes = Arrays.copyOfRange(otherNodeBytes,  split, otherNodeBytes.length);
         byte[] objectBytes;
         RyaURI subject;
-        final RyaURI predicate = new RyaURI(new String(predicateBytes));
+        final RyaURI predicate = new RyaURI(new String(predicateBytes, StandardCharsets.UTF_8));
         RyaType object;
         RyaURI context = null;
         // Expect either: entity=subject.data, otherNodeVar="object", otherNodeBytes={object.data, object.datatype}
         //            or: entity=object.data, otherNodeVar="subject", otherNodeBytes={subject.data, object.datatype}
         switch (otherNodeVar) {
-            case "subject":
-                subject = new RyaURI(new String(otherNodeData));
+            case SUBJECT:
+                subject = new RyaURI(new String(otherNodeData, StandardCharsets.UTF_8));
                 objectBytes = Bytes.concat(entityBytes, typeBytes);
                 break;
-            case "object":
-                subject = new RyaURI(new String(entityBytes));
+            case OBJECT:
+                subject = new RyaURI(new String(entityBytes, StandardCharsets.UTF_8));
                 objectBytes = Bytes.concat(otherNodeData, typeBytes);
                 break;
             default:
@@ -309,7 +313,7 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
         }
         object = RyaContext.getInstance().deserialize(objectBytes);
         if (columnFamily != null && columnFamily.length > 0) {
-            context = new RyaURI(new String(columnFamily));
+            context = new RyaURI(new String(columnFamily, StandardCharsets.UTF_8));
         }
         return new RyaStatement(subject, predicate, object, context,
                 null, columnVisibility, valueBytes, timestamp);
@@ -323,33 +327,33 @@ public class EntityCentricIndex extends AbstractAccumuloIndexer {
      * @throws IOException if edge direction can't be extracted as expected.
      * @throws RyaTypeResolverException if a type error occurs deserializing the statement's object.
      */
-    public static RyaType getRyaType(Key key, Value value) throws RyaTypeResolverException, IOException {
+    public static RyaType getRyaType(final Key key, final Value value) throws RyaTypeResolverException, IOException {
         assert key != null;
         assert value != null;
-        byte[] entityBytes = key.getRowData().toArray();
-        byte[] data = key.getColumnQualifierData().toArray();
+        final byte[] entityBytes = key.getRowData().toArray();
+        final byte[] data = key.getColumnQualifierData().toArray();
 
         // main entity is either the subject or object
         // data contains: column family , var name of other node , data of other node + datatype of object
         int split = Bytes.indexOf(data, DELIM_BYTES);
-        byte[] edgeBytes = Arrays.copyOfRange(data, split + DELIM_BYTES.length, data.length);
+        final byte[] edgeBytes = Arrays.copyOfRange(data, split + DELIM_BYTES.length, data.length);
         split = Bytes.indexOf(edgeBytes, DELIM_BYTES);
-        String otherNodeVar = new String(Arrays.copyOf(edgeBytes, split));
-        byte[] otherNodeBytes = Arrays.copyOfRange(edgeBytes,  split + DELIM_BYTES.length, edgeBytes.length);
+        final String otherNodeVar = new String(Arrays.copyOf(edgeBytes, split), StandardCharsets.UTF_8);
+        final byte[] otherNodeBytes = Arrays.copyOfRange(edgeBytes,  split + DELIM_BYTES.length, edgeBytes.length);
         split = Bytes.indexOf(otherNodeBytes, TYPE_DELIM_BYTES);
-        byte[] typeBytes = Arrays.copyOfRange(otherNodeBytes,  split, otherNodeBytes.length);
+        final byte[] typeBytes = Arrays.copyOfRange(otherNodeBytes,  split, otherNodeBytes.length);
         byte[] objectBytes;
         RyaURI subject;
         RyaType object;
         RyaType type = null;
         switch (otherNodeVar) {
-            case "subject":
+            case SUBJECT:
                 objectBytes = Bytes.concat(entityBytes, typeBytes);
-                object = RyaContext.getInstance().deserialize(objectBytes); //return this
+                object = RyaContext.getInstance().deserialize(objectBytes);
                 type = object;
                 break;
-            case "object":
-                subject = new RyaURI(new String(entityBytes));//return this
+            case OBJECT:
+                subject = new RyaURI(new String(entityBytes, StandardCharsets.UTF_8));
                 type = subject;
                 break;
             default:
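
As the inline comment in deserializeStatement notes, the entity-centric index stores every statement twice: once keyed by the subject, with the qualifier marking the other node as "object", and once keyed by the object, marking "subject". A toy, string-based rendering of that dual-row layout that ignores Rya's real type serialization:

    public class EntityRowSketch {
        private static final String DELIM = "\u0000"; // stand-in for Rya's DELIM_BYTES

        // Returns the two entity-centric rows for subject --predicate--> object.
        static String[] entityRows(final String subject, final String predicate, final String object) {
            return new String[] {
                // keyed by the subject; qualifier says the other node is the object
                subject + " | " + predicate + " | object" + DELIM + object,
                // keyed by the object; qualifier says the other node is the subject
                object + " | " + predicate + " | subject" + DELIM + subject,
            };
        }

        public static void main(final String[] args) {
            for (final String row : entityRows("urn:alice", "urn:knows", "urn:bob")) {
                System.out.println(row.replace(DELIM, "^"));
            }
        }
    }

Either row alone is enough to reconstruct the statement, which is why getRyaType above can recover the object type from whichever edge direction the scan returns.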


[02/11] incubator-rya git commit: RYA-402 Create Kafka reusable test code project. Closes #242.

Posted by ca...@apache.org.
RYA-402 Create Kafka reusable test code project. Closes #242.


Project: http://git-wip-us.apache.org/repos/asf/incubator-rya/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-rya/commit/4089e706
Tree: http://git-wip-us.apache.org/repos/asf/incubator-rya/tree/4089e706
Diff: http://git-wip-us.apache.org/repos/asf/incubator-rya/diff/4089e706

Branch: refs/heads/master
Commit: 4089e706ca17c54ced3723602672f1a69d2aa2be
Parents: 6dd81bd
Author: kchilton2 <ke...@gmail.com>
Authored: Tue Oct 10 18:36:25 2017 -0400
Committer: jdasch <hc...@gmail.com>
Committed: Thu Oct 12 12:51:38 2017 -0400

----------------------------------------------------------------------
 .../pcj/matching/AccumuloIndexSetProvider.java  |   3 +-
 ...dicNotificationApplicationConfiguration.java |   2 +-
 .../exporter/KafkaExporterExecutor.java         |   2 +-
 .../KafkaPeriodicBindingSetExporter.java        |   2 +-
 .../notification/pruner/AccumuloBinPruner.java  |   2 +-
 .../pruner/PeriodicQueryPruner.java             |   2 +-
 extras/periodic.notification/tests/pom.xml      |   4 +
 .../PeriodicNotificationApplicationIT.java      | 288 ++++++++++---------
 .../PeriodicNotificationExporterIT.java         |   4 +-
 .../PeriodicCommandNotificationConsumerIT.java  |  38 +--
 .../app/batch/AbstractSpanBatchInformation.java |   2 +-
 .../fluo/app/batch/JoinBatchInformation.java    |   2 +-
 .../export/kafka/KafkaExportParameterBase.java  |   2 +-
 .../rya/kafka/base/EmbeddedKafkaInstance.java   | 143 ---------
 .../rya/kafka/base/EmbeddedKafkaSingleton.java  |  87 ------
 .../org/apache/rya/kafka/base/KafkaITBase.java  |  38 ---
 .../rya/kafka/base/KafkaTestInstanceRule.java   |  98 -------
 pom.xml                                         |  11 +
 test/kafka/pom.xml                              |  81 ++++++
 .../rya/test/kafka/EmbeddedKafkaInstance.java   | 142 +++++++++
 .../rya/test/kafka/EmbeddedKafkaSingleton.java  |  87 ++++++
 .../org/apache/rya/test/kafka/KafkaITBase.java  |  38 +++
 .../rya/test/kafka/KafkaTestInstanceRule.java   |  98 +++++++
 .../org/apache/rya/test/kafka/PortUtils.java    |  44 +++
 test/pom.xml                                    |  39 +++
 25 files changed, 721 insertions(+), 538 deletions(-)
----------------------------------------------------------------------
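
The diffstat shows the reusable embedded-Kafka helpers moving into a new rya.test.kafka module under test/kafka so other projects can depend on them. Based on the usage visible in PeriodicNotificationApplicationIT below, a test wires them up roughly as follows (a sketch only; any signature not shown in the diff is an assumption):

    import org.apache.kafka.clients.CommonClientConfigs;
    import org.apache.rya.test.kafka.EmbeddedKafkaInstance;
    import org.apache.rya.test.kafka.EmbeddedKafkaSingleton;
    import org.apache.rya.test.kafka.KafkaTestInstanceRule;
    import org.junit.Before;
    import org.junit.BeforeClass;
    import org.junit.Rule;
    import org.junit.Test;

    public class ExampleKafkaIT {

        private static final EmbeddedKafkaInstance kafka = EmbeddedKafkaSingleton.getInstance();
        private static String bootstrapServers;

        @Rule
        public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false);

        @BeforeClass
        public static void initClass() {
            // One broker is shared across the test JVM; only its address is needed.
            bootstrapServers = kafka.createBootstrapServerConfig()
                    .getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
        }

        @Before
        public void init() throws Exception {
            // Each test gets its own uniquely named topic from the rule.
            final String topic = rule.getKafkaTopicName();
            rule.createTopic(topic);
        }

        @Test
        public void producesAndConsumes() {
            // ... point producers/consumers at bootstrapServers and the topic ...
        }
    }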


http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/indexing/src/main/java/org/apache/rya/indexing/pcj/matching/AccumuloIndexSetProvider.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/org/apache/rya/indexing/pcj/matching/AccumuloIndexSetProvider.java b/extras/indexing/src/main/java/org/apache/rya/indexing/pcj/matching/AccumuloIndexSetProvider.java
index 1940e64..40e2c77 100644
--- a/extras/indexing/src/main/java/org/apache/rya/indexing/pcj/matching/AccumuloIndexSetProvider.java
+++ b/extras/indexing/src/main/java/org/apache/rya/indexing/pcj/matching/AccumuloIndexSetProvider.java
@@ -52,11 +52,10 @@ import org.openrdf.query.QueryEvaluationException;
 import org.openrdf.query.algebra.TupleExpr;
 import org.openrdf.sail.SailException;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import jline.internal.Preconditions;
-
 /**
  * Implementation of {@link ExternalSetProvider} that provides {@link ExternalTupleSet}s.
  * This provider uses either user specified Accumulo configuration information or user a specified

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationConfiguration.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationConfiguration.java b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationConfiguration.java
index d69efe5..ff58979 100644
--- a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationConfiguration.java
+++ b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationConfiguration.java
@@ -22,7 +22,7 @@ import java.util.Properties;
 
 import org.apache.rya.accumulo.AccumuloRdfConfiguration;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Configuration object for creating a {@link PeriodicNotificationApplication}.

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java
index c2e5ebf..3b639e9 100644
--- a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java
+++ b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaExporterExecutor.java
@@ -32,7 +32,7 @@ import org.apache.rya.periodic.notification.api.BindingSetRecord;
 import org.apache.rya.periodic.notification.api.LifeCycle;
 import org.openrdf.query.BindingSet;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Executor service that runs {@link KafkaPeriodicBindingSetExporter}s.  

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java
index 8a0322f..5397618 100644
--- a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java
+++ b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/exporter/KafkaPeriodicBindingSetExporter.java
@@ -36,7 +36,7 @@ import org.apache.rya.periodic.notification.api.BindingSetRecordExportException;
 import org.openrdf.model.Literal;
 import org.openrdf.query.BindingSet;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Object that exports {@link BindingSet}s to the Kafka topic indicated by

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/AccumuloBinPruner.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/AccumuloBinPruner.java b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/AccumuloBinPruner.java
index 4dac64c..a9403c2 100644
--- a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/AccumuloBinPruner.java
+++ b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/AccumuloBinPruner.java
@@ -24,7 +24,7 @@ import org.apache.rya.indexing.pcj.storage.PeriodicQueryStorageException;
 import org.apache.rya.periodic.notification.api.BinPruner;
 import org.apache.rya.periodic.notification.api.NodeBin;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Deletes BindingSets from time bins in the indicated PCJ table

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
index 516690e..327154a 100644
--- a/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
+++ b/extras/periodic.notification/service/src/main/java/org/apache/rya/periodic/notification/pruner/PeriodicQueryPruner.java
@@ -32,7 +32,7 @@ import org.apache.rya.indexing.pcj.fluo.app.util.PeriodicQueryUtil;
 import org.apache.rya.periodic.notification.api.BinPruner;
 import org.apache.rya.periodic.notification.api.NodeBin;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Implementation of {@link BinPruner} that deletes old, already processed
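
The six hunks above all make the same one-line swap: jline.internal.Preconditions is an internal class of the JLine shell library that merely happened to be on the classpath, while com.google.common.base.Preconditions is the supported Guava API with the same call shape. For example:

    import com.google.common.base.Preconditions;

    public class PreconditionsSketch {
        static void register(final String topic, final int threads) {
            // Same call shape the Rya classes rely on, from the supported API.
            Preconditions.checkNotNull(topic, "topic must not be null");
            Preconditions.checkArgument(threads > 0, "threads must be positive, got %s", threads);
        }

        public static void main(final String[] args) {
            register("notifications", 4);
        }
    }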

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/tests/pom.xml
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/tests/pom.xml b/extras/periodic.notification/tests/pom.xml
index 229a761..feb1f0f 100644
--- a/extras/periodic.notification/tests/pom.xml
+++ b/extras/periodic.notification/tests/pom.xml
@@ -26,6 +26,10 @@
     <dependencies>
         <dependency>
             <groupId>org.apache.rya</groupId>
+            <artifactId>rya.test.kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.rya</groupId>
             <artifactId>rya.pcj.fluo.test.base</artifactId>
             <exclusions>
                 <exclusion>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java
index 9109775..3b6062f 100644
--- a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java
+++ b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/application/PeriodicNotificationApplicationIT.java
@@ -18,6 +18,9 @@
  */
 package org.apache.rya.periodic.notification.application;
 
+import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.KAFKA_BOOTSTRAP_SERVERS;
+import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.NOTIFICATION_TOPIC;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -60,14 +63,14 @@ import org.apache.rya.indexing.pcj.fluo.app.util.FluoQueryUtils;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
 import org.apache.rya.indexing.pcj.storage.PrecomputedJoinStorage.CloseableIterator;
 import org.apache.rya.indexing.pcj.storage.accumulo.AccumuloPeriodicQueryResultStorage;
-import org.apache.rya.kafka.base.EmbeddedKafkaInstance;
-import org.apache.rya.kafka.base.EmbeddedKafkaSingleton;
-import org.apache.rya.kafka.base.KafkaTestInstanceRule;
 import org.apache.rya.pcj.fluo.test.base.RyaExportITBase;
 import org.apache.rya.periodic.notification.notification.CommandNotification;
 import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient;
 import org.apache.rya.periodic.notification.serialization.BindingSetSerDe;
 import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer;
+import org.apache.rya.test.kafka.EmbeddedKafkaInstance;
+import org.apache.rya.test.kafka.EmbeddedKafkaSingleton;
+import org.apache.rya.test.kafka.KafkaTestInstanceRule;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -85,10 +88,7 @@ import org.openrdf.query.algebra.evaluation.QueryBindingSet;
 
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;
-import com.google.common.collect.Sets;
-
-import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.NOTIFICATION_TOPIC;
-import static org.apache.rya.periodic.notification.application.PeriodicNotificationApplicationConfiguration.KAFKA_BOOTSTRAP_SERVERS;;
+import com.google.common.collect.Sets;;
 
 
 public class PeriodicNotificationApplicationIT extends RyaExportITBase {
@@ -101,62 +101,64 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
     private PeriodicNotificationApplicationConfiguration conf;
     private static EmbeddedKafkaInstance embeddedKafka = EmbeddedKafkaSingleton.getInstance();
     private static String bootstrapServers;
-    
+
     @Rule
     public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false);
-    
+
     @BeforeClass
     public static void initClass() {
         bootstrapServers = embeddedKafka.createBootstrapServerConfig().getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
     }
-    
+
     @Before
     public void init() throws Exception {
-        String topic = rule.getKafkaTopicName();
+        final String topic = rule.getKafkaTopicName();
         rule.createTopic(topic);
-        
+
         //get user specified props and update with the embedded kafka bootstrap servers and rule generated topic
         props = getProps();
         props.setProperty(NOTIFICATION_TOPIC, topic);
         props.setProperty(KAFKA_BOOTSTRAP_SERVERS, bootstrapServers);
         conf = new PeriodicNotificationApplicationConfiguration(props);
-        
+
         //create Kafka Producer
         kafkaProps = getKafkaProperties(conf);
         producer = new KafkaProducer<>(kafkaProps, new StringSerializer(), new CommandNotificationSerializer());
-        
+
         //extract kafka specific properties from application config
         app = PeriodicNotificationApplicationFactory.getPeriodicApplication(props);
         registrar = new KafkaNotificationRegistrationClient(conf.getNotificationTopic(), producer);
     }
-    
+
     @Test
     public void periodicApplicationWithAggAndGroupByTest() throws Exception {
 
-        String sparql = "prefix function: <http://org.apache.rya/function#> " // n
+        final String sparql = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n
                 + "select ?type (count(?obs) as ?total) where {" // n
                 + "Filter(function:periodic(?time, 1, .25, time:minutes)) " // n
                 + "?obs <uri:hasTime> ?time. " // n
                 + "?obs <uri:hasObsType> ?type } group by ?type"; // n
-        
+
         //make data
-        int periodMult = 15;
+        final int periodMult = 15;
         final ValueFactory vf = new ValueFactoryImpl();
         final DatatypeFactory dtf = DatatypeFactory.newInstance();
         //Sleep until current time aligns nicely with period to make
         //results more predictable
-        while(System.currentTimeMillis() % (periodMult*1000) > 500);
-        ZonedDateTime time = ZonedDateTime.now();
+        while(System.currentTimeMillis() % (periodMult*1000) > 500) {
+            ;
+        }
+        final ZonedDateTime time = ZonedDateTime.now();
 
-        ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
-        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
+        final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
-        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
+        final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
-        String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
+        final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
 
         final Collection<Statement> statements = Sets.newHashSet(
                 vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"),
@@ -174,26 +176,26 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                 vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasTime"),
                         vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
                 vf.createStatement(vf.createURI("urn:obs_5"), vf.createURI("uri:hasObsType"), vf.createLiteral("automobile")));
-        
+
         try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
-            Connector connector = ConfigUtils.getConnector(conf);
-            PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
-            CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
-            String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
+            final Connector connector = ConfigUtils.getConnector(conf);
+            final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
+            final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
+            final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
             addData(statements);
             app.start();
-           
-            Multimap<Long, BindingSet> actual = HashMultimap.create();
+
+            final Multimap<Long, BindingSet> actual = HashMultimap.create();
             try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
                 consumer.subscribe(Arrays.asList(id));
-                long end = System.currentTimeMillis() + 4*periodMult*1000;
+                final long end = System.currentTimeMillis() + 4*periodMult*1000;
                 long lastBinId = 0L;
                 long binId = 0L;
-                List<Long> ids = new ArrayList<>();
+                final List<Long> ids = new ArrayList<>();
                 while (System.currentTimeMillis() < end) {
-                    ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
-                    for(ConsumerRecord<String, BindingSet> record: records){
-                        BindingSet result = record.value();
+                    final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
+                    for(final ConsumerRecord<String, BindingSet> record: records){
+                        final BindingSet result = record.value();
                         binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                         if(lastBinId != binId) {
                             lastBinId = binId;
@@ -202,103 +204,105 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                         actual.put(binId, result);
                     }
                 }
-                
-                Map<Long, Set<BindingSet>> expected = new HashMap<>();
-                
-                Set<BindingSet> expected1 = new HashSet<>();
-                QueryBindingSet bs1 = new QueryBindingSet();
+
+                final Map<Long, Set<BindingSet>> expected = new HashMap<>();
+
+                final Set<BindingSet> expected1 = new HashSet<>();
+                final QueryBindingSet bs1 = new QueryBindingSet();
                 bs1.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
                 bs1.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
                 bs1.addBinding("type", vf.createLiteral("airplane"));
-                
-                QueryBindingSet bs2 = new QueryBindingSet();
+
+                final QueryBindingSet bs2 = new QueryBindingSet();
                 bs2.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
                 bs2.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
                 bs2.addBinding("type", vf.createLiteral("ship"));
-                
-                QueryBindingSet bs3 = new QueryBindingSet();
+
+                final QueryBindingSet bs3 = new QueryBindingSet();
                 bs3.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(0)));
                 bs3.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
                 bs3.addBinding("type", vf.createLiteral("automobile"));
-                
+
                 expected1.add(bs1);
                 expected1.add(bs2);
                 expected1.add(bs3);
-                
-                Set<BindingSet> expected2 = new HashSet<>();
-                QueryBindingSet bs4 = new QueryBindingSet();
+
+                final Set<BindingSet> expected2 = new HashSet<>();
+                final QueryBindingSet bs4 = new QueryBindingSet();
                 bs4.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
                 bs4.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
                 bs4.addBinding("type", vf.createLiteral("airplane"));
-                
-                QueryBindingSet bs5 = new QueryBindingSet();
+
+                final QueryBindingSet bs5 = new QueryBindingSet();
                 bs5.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(1)));
                 bs5.addBinding("total", new LiteralImpl("2", XMLSchema.INTEGER));
                 bs5.addBinding("type", vf.createLiteral("ship"));
-                
+
                 expected2.add(bs4);
                 expected2.add(bs5);
-                
-                Set<BindingSet> expected3 = new HashSet<>();
-                QueryBindingSet bs6 = new QueryBindingSet();
+
+                final Set<BindingSet> expected3 = new HashSet<>();
+                final QueryBindingSet bs6 = new QueryBindingSet();
                 bs6.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
                 bs6.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
                 bs6.addBinding("type", vf.createLiteral("ship"));
-                
-                QueryBindingSet bs7 = new QueryBindingSet();
+
+                final QueryBindingSet bs7 = new QueryBindingSet();
                 bs7.addBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID, vf.createLiteral(ids.get(2)));
                 bs7.addBinding("total", new LiteralImpl("1", XMLSchema.INTEGER));
                 bs7.addBinding("type", vf.createLiteral("airplane"));
-                
+
                 expected3.add(bs6);
                 expected3.add(bs7);
-                
+
                 expected.put(ids.get(0), expected1);
                 expected.put(ids.get(1), expected2);
                 expected.put(ids.get(2), expected3);
-                
+
                 Assert.assertEquals(3, actual.asMap().size());
-                for(Long ident: ids) {
+                for(final Long ident: ids) {
                     Assert.assertEquals(expected.get(ident), actual.get(ident));
                 }
             }
-            
-            Set<BindingSet> expectedResults = new HashSet<>();
+
+            final Set<BindingSet> expectedResults = new HashSet<>();
             try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
                 results.forEachRemaining(x -> expectedResults.add(x));
                 Assert.assertEquals(0, expectedResults.size());
             }
         }
     }
-    
-    
+
+
     @Test
     public void periodicApplicationWithAggTest() throws Exception {
 
-        String sparql = "prefix function: <http://org.apache.rya/function#> " // n
+        final String sparql = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n
                 + "select (count(?obs) as ?total) where {" // n
                 + "Filter(function:periodic(?time, 1, .25, time:minutes)) " // n
                 + "?obs <uri:hasTime> ?time. " // n
                 + "?obs <uri:hasId> ?id } "; // n
-        
+
         //make data
-        int periodMult = 15;
+        final int periodMult = 15;
         final ValueFactory vf = new ValueFactoryImpl();
         final DatatypeFactory dtf = DatatypeFactory.newInstance();
         //Sleep until current time aligns nicely with period to make
         //results more predictable
-        while(System.currentTimeMillis() % (periodMult*1000) > 500);
-        ZonedDateTime time = ZonedDateTime.now();
+        while(System.currentTimeMillis() % (periodMult*1000) > 500) {
+            ;
+        }
+        final ZonedDateTime time = ZonedDateTime.now();
 
-        ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
-        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
+        final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
-        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
+        final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
-        String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
+        final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
 
         final Collection<Statement> statements = Sets.newHashSet(
                 vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"),
@@ -310,26 +314,26 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                 vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"),
                         vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
                 vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
-        
+
         try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
-            Connector connector = ConfigUtils.getConnector(conf);
-            PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
-            CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
-            String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
+            final Connector connector = ConfigUtils.getConnector(conf);
+            final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
+            final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
+            final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
             addData(statements);
             app.start();
-            
-            Multimap<Long, BindingSet> expected = HashMultimap.create();
+
+            final Multimap<Long, BindingSet> expected = HashMultimap.create();
             try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
                 consumer.subscribe(Arrays.asList(id));
-                long end = System.currentTimeMillis() + 4*periodMult*1000;
+                final long end = System.currentTimeMillis() + 4*periodMult*1000;
                 long lastBinId = 0L;
                 long binId = 0L;
-                List<Long> ids = new ArrayList<>();
+                final List<Long> ids = new ArrayList<>();
                 while (System.currentTimeMillis() < end) {
-                    ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
-                    for(ConsumerRecord<String, BindingSet> record: records){
-                        BindingSet result = record.value();
+                    final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
+                    for(final ConsumerRecord<String, BindingSet> record: records){
+                        final BindingSet result = record.value();
                         binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                         if(lastBinId != binId) {
                             lastBinId = binId;
@@ -338,21 +342,21 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                         expected.put(binId, result);
                     }
                 }
-                
+
                 Assert.assertEquals(3, expected.asMap().size());
                 int i = 0;
-                for(Long ident: ids) {
+                for(final Long ident: ids) {
                     Assert.assertEquals(1, expected.get(ident).size());
-                    BindingSet bs = expected.get(ident).iterator().next();
-                    Value val = bs.getValue("total");
-                    int total = Integer.parseInt(val.stringValue());
+                    final BindingSet bs = expected.get(ident).iterator().next();
+                    final Value val = bs.getValue("total");
+                    final int total = Integer.parseInt(val.stringValue());
                     Assert.assertEquals(3-i, total);
                     i++;
                 }
             }
-            
-            
-            Set<BindingSet> expectedResults = new HashSet<>();
+
+
+            final Set<BindingSet> expectedResults = new HashSet<>();
             try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
                 results.forEachRemaining(x -> expectedResults.add(x));
                 Assert.assertEquals(0, expectedResults.size());
@@ -360,35 +364,37 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
         }
 
     }
-    
-    
+
+
     @Test
     public void periodicApplicationTest() throws Exception {
 
-        String sparql = "prefix function: <http://org.apache.rya/function#> " // n
+        final String sparql = "prefix function: <http://org.apache.rya/function#> " // n
                 + "prefix time: <http://www.w3.org/2006/time#> " // n
                 + "select ?obs ?id where {" // n
                 + "Filter(function:periodic(?time, 1, .25, time:minutes)) " // n
                 + "?obs <uri:hasTime> ?time. " // n
                 + "?obs <uri:hasId> ?id } "; // n
-        
+
         //make data
-        int periodMult = 15;
+        final int periodMult = 15;
         final ValueFactory vf = new ValueFactoryImpl();
         final DatatypeFactory dtf = DatatypeFactory.newInstance();
         //Sleep until current time aligns nicely with period to make
         //results more predictable
-        while(System.currentTimeMillis() % (periodMult*1000) > 500);
-        ZonedDateTime time = ZonedDateTime.now();
+        while(System.currentTimeMillis() % (periodMult*1000) > 500) {
+            ;
+        }
+        final ZonedDateTime time = ZonedDateTime.now();
 
-        ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
-        String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime1 = time.minusSeconds(2*periodMult);
+        final String time1 = zTime1.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
-        String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime2 = zTime1.minusSeconds(periodMult);
+        final String time2 = zTime2.format(DateTimeFormatter.ISO_INSTANT);
 
-        ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
-        String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
+        final ZonedDateTime zTime3 = zTime2.minusSeconds(periodMult);
+        final String time3 = zTime3.format(DateTimeFormatter.ISO_INSTANT);
 
         final Collection<Statement> statements = Sets.newHashSet(
                 vf.createStatement(vf.createURI("urn:obs_1"), vf.createURI("uri:hasTime"),
@@ -400,26 +406,26 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                 vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasTime"),
                         vf.createLiteral(dtf.newXMLGregorianCalendar(time3))),
                 vf.createStatement(vf.createURI("urn:obs_3"), vf.createURI("uri:hasId"), vf.createLiteral("id_3")));
-        
+
         try (FluoClient fluo = FluoClientFactory.getFluoClient(conf.getFluoAppName(), Optional.of(conf.getFluoTableName()), conf)) {
-            Connector connector = ConfigUtils.getConnector(conf);
-            PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
-            CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
-            String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
+            final Connector connector = ConfigUtils.getConnector(conf);
+            final PeriodicQueryResultStorage storage = new AccumuloPeriodicQueryResultStorage(connector, conf.getTablePrefix());
+            final CreatePeriodicQuery periodicQuery = new CreatePeriodicQuery(fluo, storage);
+            final String id = FluoQueryUtils.convertFluoQueryIdToPcjId(periodicQuery.createPeriodicQuery(sparql, registrar).getQueryId());
             addData(statements);
             app.start();
-           
-            Multimap<Long, BindingSet> expected = HashMultimap.create();
+
+            final Multimap<Long, BindingSet> expected = HashMultimap.create();
             try (KafkaConsumer<String, BindingSet> consumer = new KafkaConsumer<>(kafkaProps, new StringDeserializer(), new BindingSetSerDe())) {
                 consumer.subscribe(Arrays.asList(id));
-                long end = System.currentTimeMillis() + 4*periodMult*1000;
+                final long end = System.currentTimeMillis() + 4*periodMult*1000;
                 long lastBinId = 0L;
                 long binId = 0L;
-                List<Long> ids = new ArrayList<>();
+                final List<Long> ids = new ArrayList<>();
                 while (System.currentTimeMillis() < end) {
-                    ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
-                    for(ConsumerRecord<String, BindingSet> record: records){
-                        BindingSet result = record.value();
+                    final ConsumerRecords<String, BindingSet> records = consumer.poll(periodMult*1000);
+                    for(final ConsumerRecord<String, BindingSet> record: records){
+                        final BindingSet result = record.value();
                         binId = Long.parseLong(result.getBinding(IncrementalUpdateConstants.PERIODIC_BIN_ID).getValue().stringValue());
                         if(lastBinId != binId) {
                             lastBinId = binId;
@@ -428,17 +434,17 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
                         expected.put(binId, result);
                     }
                 }
-                
+
                 Assert.assertEquals(3, expected.asMap().size());
                 int i = 0;
-                for(Long ident: ids) {
+                for(final Long ident: ids) {
                     Assert.assertEquals(3-i, expected.get(ident).size());
                     i++;
                 }
             }
-            
-            
-            Set<BindingSet> expectedResults = new HashSet<>();
+
+
+            final Set<BindingSet> expectedResults = new HashSet<>();
             try (CloseableIterator<BindingSet> results = storage.listResults(id, Optional.empty())) {
                 results.forEachRemaining(x -> expectedResults.add(x));
                 Assert.assertEquals(0, expectedResults.size());
@@ -446,40 +452,40 @@ public class PeriodicNotificationApplicationIT extends RyaExportITBase {
         }
 
     }
-    
-    
+
+
     @After
     public void shutdown() {
         registrar.close();
         app.stop();
     }
-    
-    private void addData(Collection<Statement> statements) throws DatatypeConfigurationException {
+
+    private void addData(final Collection<Statement> statements) throws DatatypeConfigurationException {
         // add statements to Fluo
         try (FluoClient fluo = new FluoClientImpl(getFluoConfiguration())) {
-            InsertTriples inserter = new InsertTriples();
+            final InsertTriples inserter = new InsertTriples();
             statements.forEach(x -> inserter.insert(fluo, RdfToRyaConversions.convertStatement(x)));
             getMiniFluo().waitForObservers();
         }
     }
 
-    private static Properties getKafkaProperties(PeriodicNotificationApplicationConfiguration conf) { 
-        Properties kafkaProps = new Properties();
+    private static Properties getKafkaProperties(final PeriodicNotificationApplicationConfiguration conf) {
+        final Properties kafkaProps = new Properties();
         kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
         kafkaProps.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, UUID.randomUUID().toString());
         kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, conf.getNotificationGroupId());
         kafkaProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
         return kafkaProps;
     }
-    
+
     private Properties getProps() throws IOException {
-        
-        Properties props = new Properties();
+
+        final Properties props = new Properties();
         try(InputStream in = new FileInputStream("src/test/resources/notification.properties")) {
             props.load(in);
-        } 
-        
-        FluoConfiguration fluoConf = getFluoConfiguration();
+        }
+
+        final FluoConfiguration fluoConf = getFluoConfiguration();
         props.setProperty("accumulo.user", getUsername());
         props.setProperty("accumulo.password", getPassword());
         props.setProperty("accumulo.instance", getMiniAccumuloCluster().getInstanceName());

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
index 874e7e2..82338b9 100644
--- a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
+++ b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/exporter/PeriodicNotificationExporterIT.java
@@ -34,10 +34,10 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.rya.indexing.pcj.storage.PeriodicQueryResultStorage;
-import org.apache.rya.kafka.base.KafkaITBase;
-import org.apache.rya.kafka.base.KafkaTestInstanceRule;
 import org.apache.rya.periodic.notification.api.BindingSetRecord;
 import org.apache.rya.periodic.notification.serialization.BindingSetSerDe;
+import org.apache.rya.test.kafka.KafkaITBase;
+import org.apache.rya.test.kafka.KafkaTestInstanceRule;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java
----------------------------------------------------------------------
diff --git a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java
index 522e69d..1fb6167 100644
--- a/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java
+++ b/extras/periodic.notification/tests/src/test/java/org/apache/rya/periodic/notification/registration/kafka/PeriodicCommandNotificationConsumerIT.java
@@ -30,13 +30,13 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.apache.log4j.BasicConfigurator;
-import org.apache.rya.kafka.base.KafkaITBase;
-import org.apache.rya.kafka.base.KafkaTestInstanceRule;
 import org.apache.rya.periodic.notification.coordinator.PeriodicNotificationCoordinatorExecutor;
 import org.apache.rya.periodic.notification.notification.CommandNotification;
 import org.apache.rya.periodic.notification.notification.TimestampedNotification;
 import org.apache.rya.periodic.notification.registration.KafkaNotificationRegistrationClient;
 import org.apache.rya.periodic.notification.serialization.CommandNotificationSerializer;
+import org.apache.rya.test.kafka.KafkaITBase;
+import org.apache.rya.test.kafka.KafkaTestInstanceRule;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -48,10 +48,10 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
     private PeriodicNotificationCoordinatorExecutor coord;
     private KafkaNotificationProvider provider;
     private String bootstrapServer;
-    
+
     @Rule
     public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false);
-    
+
     @Before
     public void init() throws Exception {
         bootstrapServer = createBootstrapServerConfig().getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
@@ -62,12 +62,12 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
 
         BasicConfigurator.configure();
 
-        BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
-        Properties props = createKafkaConfig();
-        KafkaProducer<String, CommandNotification> producer = new KafkaProducer<>(props);
-        String topic = rule.getKafkaTopicName();
+        final BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
+        final Properties props = createKafkaConfig();
+        final KafkaProducer<String, CommandNotification> producer = new KafkaProducer<>(props);
+        final String topic = rule.getKafkaTopicName();
         rule.createTopic(topic);
-        
+
         registration = new KafkaNotificationRegistrationClient(topic, producer);
         coord = new PeriodicNotificationCoordinatorExecutor(1, notifications);
         provider = new KafkaNotificationProvider(topic, new StringDeserializer(), new CommandNotificationSerializer(), props, coord, 1);
@@ -80,11 +80,11 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
 
         registration.deleteNotification("1");
         Thread.sleep(2000);
-        int size = notifications.size();
+        final int size = notifications.size();
         // sleep for 2 seconds to ensure no more messages are being produced
         Thread.sleep(2000);
         Assert.assertEquals(size, notifications.size());
-        
+
         tearDown();
     }
 
@@ -93,12 +93,12 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
 
         BasicConfigurator.configure();
 
-        BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
-        Properties props = createKafkaConfig();
-        KafkaProducer<String, CommandNotification> producer = new KafkaProducer<>(props);
-        String topic = rule.getKafkaTopicName();
+        final BlockingQueue<TimestampedNotification> notifications = new LinkedBlockingQueue<>();
+        final Properties props = createKafkaConfig();
+        final KafkaProducer<String, CommandNotification> producer = new KafkaProducer<>(props);
+        final String topic = rule.getKafkaTopicName();
         rule.createTopic(topic);
-        
+
         registration = new KafkaNotificationRegistrationClient(topic, producer);
         coord = new PeriodicNotificationCoordinatorExecutor(1, notifications);
         provider = new KafkaNotificationProvider(topic, new StringDeserializer(), new CommandNotificationSerializer(), props, coord, 1);
@@ -111,11 +111,11 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
 
         registration.deleteNotification("1");
         Thread.sleep(2000);
-        int size = notifications.size();
+        final int size = notifications.size();
         // sleep for 2 seconds to ensure no more messages are being produced
         Thread.sleep(2000);
         Assert.assertEquals(size, notifications.size());
-        
+
         tearDown();
     }
 
@@ -126,7 +126,7 @@ public class PeriodicCommandNotificationConsumerIT extends KafkaITBase {
     }
 
     private Properties createKafkaConfig() {
-        Properties props = new Properties();
+        final Properties props = new Properties();
         props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
         props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
         props.setProperty(ConsumerConfig.CLIENT_ID_CONFIG, "consumer0");

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/AbstractSpanBatchInformation.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/AbstractSpanBatchInformation.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/AbstractSpanBatchInformation.java
index 498dd85..4933d57 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/AbstractSpanBatchInformation.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/AbstractSpanBatchInformation.java
@@ -23,7 +23,7 @@ import java.util.Objects;
 import org.apache.fluo.api.data.Column;
 import org.apache.fluo.api.data.Span;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Abstract class for generating span based notifications.  A spanned notification

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/JoinBatchInformation.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/JoinBatchInformation.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/JoinBatchInformation.java
index d049ff0..3354fdc 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/JoinBatchInformation.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/batch/JoinBatchInformation.java
@@ -27,7 +27,7 @@ import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder;
 import org.apache.rya.indexing.pcj.storage.accumulo.VisibilityBindingSet;
 import org.openrdf.query.Binding;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * This class updates join results based on parameters specified for the join's

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
index aab3929..8686c85 100644
--- a/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
+++ b/extras/rya.pcj.fluo/pcj.fluo.app/src/main/java/org/apache/rya/indexing/pcj/fluo/app/export/kafka/KafkaExportParameterBase.java
@@ -26,7 +26,7 @@ import org.apache.fluo.api.observer.Observer;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.rya.indexing.pcj.fluo.app.export.ParametersBase;
 
-import jline.internal.Preconditions;
+import com.google.common.base.Preconditions;
 
 /**
  * Provides read/write functions to the parameters map that is passed into an

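These three import swaps replace jline's repackaged Preconditions with the canonical Guava class of the same name; the static checks the batch classes rely on keep the same signatures, so no call sites change. A minimal sketch of the Guava calls involved (the argument and messages here are illustrative, not taken from the batch classes):

    import com.google.common.base.Preconditions;

    final Object span = new Object();
    // Throws NullPointerException with the supplied message when the reference is null.
    Preconditions.checkNotNull(span, "span must not be null");
    // Throws IllegalArgumentException when the condition does not hold.
    Preconditions.checkArgument(span != null, "span must not be null");
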
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
deleted file mode 100644
index 97d8b90..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaInstance.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.kafka.base;
-
-import java.nio.file.Files;
-import java.util.Properties;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.fluo.core.util.PortUtils;
-import org.apache.kafka.clients.CommonClientConfigs;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import kafka.server.KafkaConfig;
-import kafka.server.KafkaConfig$;
-import kafka.server.KafkaServer;
-import kafka.utils.MockTime;
-import kafka.utils.TestUtils;
-import kafka.utils.Time;
-import kafka.zk.EmbeddedZookeeper;
-
-/**
- * This class provides a {@link KafkaServer} and a dedicated
- * {@link EmbeddedZookeeper} server for integration testing. Both servers use a
- * random free port, so it is necessary to use the
- * {@link #getZookeeperConnect()} and {@link #createBootstrapServerConfig()}
- * methods to determine how to connect to them.
- *
- */
-public class EmbeddedKafkaInstance {
-
-    private static final Logger logger = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
-
-    private static final AtomicInteger KAFKA_TOPIC_COUNTER = new AtomicInteger(1);
-    private static final String IPv4_LOOPBACK = "127.0.0.1";
-    private static final String ZKHOST = IPv4_LOOPBACK;
-    private static final String BROKERHOST = IPv4_LOOPBACK;
-    private KafkaServer kafkaServer;
-    private EmbeddedZookeeper zkServer;
-    private String brokerPort;
-    private String zookeperConnect;
-
-    /**
-     * Starts the Embedded Kafka and Zookeeper Servers.
-     * @throws Exception - If an exception occurs during startup.
-     */
-    protected void startup() throws Exception {
-        // Setup the embedded zookeeper
-        logger.info("Starting up Embedded Zookeeper...");
-        zkServer = new EmbeddedZookeeper();
-        zookeperConnect = ZKHOST + ":" + zkServer.port();
-        logger.info("Embedded Zookeeper started at: {}", zookeperConnect);
-
-        // setup Broker
-        logger.info("Starting up Embedded Kafka...");
-        brokerPort = Integer.toString(PortUtils.getRandomFreePort());
-        final Properties brokerProps = new Properties();
-        brokerProps.setProperty(KafkaConfig$.MODULE$.BrokerIdProp(), "0");
-        brokerProps.setProperty(KafkaConfig$.MODULE$.HostNameProp(), BROKERHOST);
-        brokerProps.setProperty(KafkaConfig$.MODULE$.PortProp(), brokerPort);
-        brokerProps.setProperty(KafkaConfig$.MODULE$.ZkConnectProp(), zookeperConnect);
-        brokerProps.setProperty(KafkaConfig$.MODULE$.LogDirsProp(), Files.createTempDirectory(getClass().getSimpleName() + "-").toAbsolutePath().toString());
-        final KafkaConfig config = new KafkaConfig(brokerProps);
-        final Time mock = new MockTime();
-        kafkaServer = TestUtils.createServer(config, mock);
-        logger.info("Embedded Kafka Server started at: {}:{}", BROKERHOST, brokerPort);
-    }
-
-    /**
-     * Shutdown the Embedded Kafka and Zookeeper.
-     * @throws Exception
-     */
-    protected void shutdown() throws Exception {
-        try {
-            if(kafkaServer != null) {
-                kafkaServer.shutdown();
-            }
-        } finally {
-            if(zkServer != null) {
-                zkServer.shutdown();
-            }
-        }
-    }
-
-    /**
-     * @return A new Property object containing the correct value of
-     *         {@link CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}, for
-     *         connecting to this instance.
-     */
-    public Properties createBootstrapServerConfig() {
-        final Properties config = new Properties();
-        config.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + brokerPort);
-        return config;
-    }
-
-    /**
-     *
-     * @return The host of the Kafka Broker.
-     */
-    public String getBrokerHost() {
-        return BROKERHOST;
-    }
-
-    /**
-     *
-     * @return The port of the Kafka Broker.
-     */
-    public String getBrokerPort() {
-        return brokerPort;
-    }
-
-    /**
-     *
-     * @return The Zookeeper Connect String.
-     */
-    public String getZookeeperConnect() {
-        return zookeperConnect;
-    }
-
-    /**
-     *
-     * @return A unique Kafka topic name for this instance.
-     */
-    public String getUniqueTopicName() {
-        return "topic_" + KAFKA_TOPIC_COUNTER.getAndIncrement() + "_";
-    }
-}

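The class above is deleted here and recreated verbatim under org.apache.rya.test.kafka (see the new copy later in this commit). A hedged sketch of a test-method fragment that connects a producer to the embedded broker through createBootstrapServerConfig(); the String serializers and the record contents are illustrative:

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.rya.test.kafka.EmbeddedKafkaInstance;
    import org.apache.rya.test.kafka.EmbeddedKafkaSingleton;

    final EmbeddedKafkaInstance kafka = EmbeddedKafkaSingleton.getInstance();
    // The broker listens on a random free port, so the bootstrap servers
    // property must always come from the running instance.
    final Properties props = kafka.createBootstrapServerConfig();
    try (KafkaProducer<String, String> producer =
            new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
        producer.send(new ProducerRecord<>(kafka.getUniqueTopicName(), "key", "value"));
    }
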
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
deleted file mode 100644
index 933377b..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/EmbeddedKafkaSingleton.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.kafka.base;
-
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Provides a singleton instance of an {@link EmbeddedKafkaInstance} and
- * includes a shutdown hook to ensure any open resources are closed on JVM exit.
- * <p>
- * This class is derived from MiniAccumuloSingleton.
- */
-public class EmbeddedKafkaSingleton {
-
-    public static EmbeddedKafkaInstance getInstance() {
-        return InstanceHolder.SINGLETON.instance;
-    }
-
-    private EmbeddedKafkaSingleton() {
-        // hiding implicit default constructor
-    }
-
-    private enum InstanceHolder {
-
-        SINGLETON;
-
-        private final Logger log;
-        private final EmbeddedKafkaInstance instance;
-
-        InstanceHolder() {
-            this.log = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
-            this.instance = new EmbeddedKafkaInstance();
-            try {
-                this.instance.startup();
-
-                // JUnit does not have an overall lifecycle event for tearing down
-                // this kind of resource, but shutdown hooks work alright in practice
-                // since this should only be used during testing
-
-                // The only other alternative for lifecycle management is to use a
-                // suite lifecycle to enclose the tests that need this resource.
-                // In practice this becomes unwieldy.
-
-                Runtime.getRuntime().addShutdownHook(new Thread() {
-                    @Override
-                    public void run() {
-                        try {
-                            InstanceHolder.this.instance.shutdown();
-                        } catch (final Throwable t) {
-                            // logging frameworks will likely be shut down
-                            t.printStackTrace(System.err);
-                        }
-                    }
-                });
-
-            } catch (final InterruptedException e) {
-                Thread.currentThread().interrupt();
-                log.error("Interrupted while starting EmbeddedKafkaInstance", e);
-            } catch (final IOException e) {
-                log.error("Unexpected error while starting EmbeddedKafkaInstance", e);
-            } catch (final Throwable e) {
-                // catching throwable because failure to construct an enum
-                // instance will lead to another error being thrown downstream
-                log.error("Unexpected throwable while starting EmbeddedKafkaInstance", e);
-            }
-        }
-    }
-}

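As the comment in the class notes, lifecycle management is handled entirely by the enum holder and the JVM shutdown hook. A short sketch of the intended contract, assuming only what the class above shows:

    import org.apache.rya.test.kafka.EmbeddedKafkaInstance;
    import org.apache.rya.test.kafka.EmbeddedKafkaSingleton;

    // First access starts the broker; later accesses, from any test in the
    // same JVM, return the same running instance. Teardown is left to the
    // shutdown hook, so tests never call startup() or shutdown() directly.
    final EmbeddedKafkaInstance kafka = EmbeddedKafkaSingleton.getInstance();
    final String zkConnect = kafka.getZookeeperConnect();
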
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
deleted file mode 100644
index da4526c..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaITBase.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.kafka.base;
-
-import java.util.Properties;
-
-/**
- * A class intended to be extended for Kafka Integration tests.
- */
-public class KafkaITBase {
-
-    private static EmbeddedKafkaInstance embeddedKafka = EmbeddedKafkaSingleton.getInstance();
-
-    /**
-     * @return A new Property object containing the correct value for Kafka's
-     *         {@link CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}.
-     */
-    protected Properties createBootstrapServerConfig() {
-        return embeddedKafka.createBootstrapServerConfig();
-    }
-
-}

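A sketch of how an integration test extends this base class to resolve the embedded broker's address; the subclass name is hypothetical, and the lookup mirrors the ITs updated elsewhere in this commit:

    import org.apache.kafka.clients.CommonClientConfigs;
    import org.apache.rya.test.kafka.KafkaITBase;
    import org.junit.Before;

    public class ExampleKafkaIT extends KafkaITBase {
        private String bootstrapServer;

        @Before
        public void init() throws Exception {
            // Resolve the embedded broker's host:port through the base class.
            bootstrapServer = createBootstrapServerConfig()
                    .getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG);
        }
    }
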
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
----------------------------------------------------------------------
diff --git a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java b/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
deleted file mode 100644
index a9ee7b5..0000000
--- a/extras/rya.pcj.fluo/pcj.fluo.test.base/src/main/java/org/apache/rya/kafka/base/KafkaTestInstanceRule.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.rya.kafka.base;
-
-import java.util.Properties;
-
-import org.I0Itec.zkclient.ZkClient;
-import org.junit.rules.ExternalResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import kafka.admin.AdminUtils;
-import kafka.admin.RackAwareMode;
-import kafka.utils.ZKStringSerializer$;
-import kafka.utils.ZkUtils;
-
-
-/**
- * Provides a JUnit Rule for interacting with the {@link EmbeddedKafkaSingleton}.
- *
- */
-public class KafkaTestInstanceRule extends ExternalResource {
-    private static final Logger logger = LoggerFactory.getLogger(KafkaTestInstanceRule.class);
-    private static final EmbeddedKafkaInstance kafkaInstance = EmbeddedKafkaSingleton.getInstance();
-    private String kafkaTopicName;
-    private final boolean createTopic;
-
-    /**
-     * @param createTopic - If true, a topic shall be created for the value
-     *            provided by {@link #getKafkaTopicName()}. If false, no topics
-     *            shall be created.
-     */
-    public KafkaTestInstanceRule(final boolean createTopic) {
-        this.createTopic = createTopic;
-    }
-
-    /**
-     * @return A unique topic name for this test execution. If multiple topics are required by a test, use this value as
-     *         a prefix.
-     */
-    public String getKafkaTopicName() {
-        if (kafkaTopicName == null) {
-            throw new IllegalStateException("Cannot get Kafka Topic Name outside of a test execution.");
-        }
-        return kafkaTopicName;
-    }
-
-    @Override
-    protected void before() throws Throwable {
-        // Get the next kafka topic name.
-        kafkaTopicName = kafkaInstance.getUniqueTopicName();
-
-        if(createTopic) {
-            createTopic(kafkaTopicName);
-        }
-    }
-
-    @Override
-    protected void after() {
-        kafkaTopicName = null;
-    }
-
-    /**
-     * Utility method to provide additional unique topics if they are required.
-     * @param topicName - The Kafka topic to create.
-     */
-    public void createTopic(final String topicName) {
-        // Setup Kafka.
-        ZkUtils zkUtils = null;
-        try {
-            logger.info("Creating Kafka Topic: '{}'", topicName);
-            zkUtils = ZkUtils.apply(new ZkClient(kafkaInstance.getZookeeperConnect(), 30000, 30000, ZKStringSerializer$.MODULE$), false);
-            AdminUtils.createTopic(zkUtils, topicName, 1, 1, new Properties(), RackAwareMode.Disabled$.MODULE$);
-        }
-        finally {
-            if(zkUtils != null) {
-                zkUtils.close();
-            }
-        }
-    }
-
-}

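A sketch of the rule in use, mirroring the ITs in this commit (the test class name is hypothetical): constructed with false, the rule only reserves a unique topic name, and the test creates topics itself.

    import org.apache.rya.test.kafka.KafkaTestInstanceRule;
    import org.junit.Rule;
    import org.junit.Test;

    public class ExampleTopicIT {
        // Pass true to have the rule create the topic before each test;
        // pass false to create topics manually, as below.
        @Rule
        public KafkaTestInstanceRule rule = new KafkaTestInstanceRule(false);

        @Test
        public void usesUniqueTopic() throws Exception {
            final String topic = rule.getKafkaTopicName();
            rule.createTopic(topic);
            // ... produce to and consume from the embedded broker ...
        }
    }
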
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 92cfba8..3dc7c68 100644
--- a/pom.xml
+++ b/pom.xml
@@ -65,6 +65,7 @@ under the License.
         <module>pig</module>
         <module>sail</module>
         <module>spark</module>
+        <module>test</module>
         <module>web</module>
     </modules>
     <properties>
@@ -280,6 +281,16 @@ under the License.
                 <version>${project.version}</version>
             </dependency>
             <dependency>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.test.parent</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.rya</groupId>
+                <artifactId>rya.test.kafka</artifactId>
+                <version>${project.version}</version>
+            </dependency>
+            <dependency>
                 <groupId>org.apache.accumulo</groupId>
                 <artifactId>accumulo-core</artifactId>
                 <version>${accumulo.version}</version>

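With these entries under dependencyManagement, a consuming module can declare the new test artifact without repeating the version. A sketch of the fragment such a module's pom would carry (the test scope shown is the typical choice, not something this commit mandates):

    <dependency>
        <groupId>org.apache.rya</groupId>
        <artifactId>rya.test.kafka</artifactId>
        <scope>test</scope>
    </dependency>
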
http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/pom.xml
----------------------------------------------------------------------
diff --git a/test/kafka/pom.xml b/test/kafka/pom.xml
new file mode 100644
index 0000000..44773a7
--- /dev/null
+++ b/test/kafka/pom.xml
@@ -0,0 +1,81 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <parent>
+        <groupId>org.apache.rya</groupId>
+        <artifactId>rya.test.parent</artifactId>
+        <version>3.2.12-incubating-SNAPSHOT</version>
+    </parent>
+    
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>rya.test.kafka</artifactId>
+    
+    <name>Apache Rya Test Kafka</name>
+    <description>
+        This module contains the Rya Test Kafka components that help write
+        Kafka-based integration tests.
+    </description>
+
+    <dependencies>
+        <!-- Kafka dependencies. -->
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+        </dependency>
+        
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka-clients</artifactId>
+            <classifier>test</classifier>
+        </dependency>
+        
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.11</artifactId>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.11</artifactId>
+            <classifier>test</classifier>
+            <scope>compile</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-log4j12</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    
+        <!-- Testing dependencies. -->
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>compile</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaInstance.java
----------------------------------------------------------------------
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaInstance.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaInstance.java
new file mode 100644
index 0000000..c7c5929
--- /dev/null
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaInstance.java
@@ -0,0 +1,142 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.test.kafka;
+
+import java.nio.file.Files;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaConfig$;
+import kafka.server.KafkaServer;
+import kafka.utils.MockTime;
+import kafka.utils.TestUtils;
+import kafka.utils.Time;
+import kafka.zk.EmbeddedZookeeper;
+
+/**
+ * This class provides a {@link KafkaServer} and a dedicated
+ * {@link EmbeddedZookeeper} server for integration testing. Both servers use a
+ * random free port, so it is necessary to use the
+ * {@link #getZookeeperConnect()} and {@link #createBootstrapServerConfig()}
+ * methods to determine how to connect to them.
+ *
+ */
+public class EmbeddedKafkaInstance {
+
+    private static final Logger logger = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
+
+    private static final AtomicInteger KAFKA_TOPIC_COUNTER = new AtomicInteger(1);
+    private static final String IPv4_LOOPBACK = "127.0.0.1";
+    private static final String ZKHOST = IPv4_LOOPBACK;
+    private static final String BROKERHOST = IPv4_LOOPBACK;
+    private KafkaServer kafkaServer;
+    private EmbeddedZookeeper zkServer;
+    private String brokerPort;
+    private String zookeeperConnect;
+
+    /**
+     * Starts the Embedded Kafka and Zookeeper Servers.
+     * @throws Exception - If an exception occurs during startup.
+     */
+    protected void startup() throws Exception {
+        // Setup the embedded zookeeper
+        logger.info("Starting up Embedded Zookeeper...");
+        zkServer = new EmbeddedZookeeper();
+        zookeeperConnect = ZKHOST + ":" + zkServer.port();
+        logger.info("Embedded Zookeeper started at: {}", zookeeperConnect);
+
+        // setup Broker
+        logger.info("Starting up Embedded Kafka...");
+        brokerPort = Integer.toString(PortUtils.getRandomFreePort());
+        final Properties brokerProps = new Properties();
+        brokerProps.setProperty(KafkaConfig$.MODULE$.BrokerIdProp(), "0");
+        brokerProps.setProperty(KafkaConfig$.MODULE$.HostNameProp(), BROKERHOST);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.PortProp(), brokerPort);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.ZkConnectProp(), zookeeperConnect);
+        brokerProps.setProperty(KafkaConfig$.MODULE$.LogDirsProp(), Files.createTempDirectory(getClass().getSimpleName() + "-").toAbsolutePath().toString());
+        final KafkaConfig config = new KafkaConfig(brokerProps);
+        final Time mock = new MockTime();
+        kafkaServer = TestUtils.createServer(config, mock);
+        logger.info("Embedded Kafka Server started at: {}:{}", BROKERHOST, brokerPort);
+    }
+
+    /**
+     * Shutdown the Embedded Kafka and Zookeeper.
+     * @throws Exception - If an exception occurs during shutdown.
+     */
+    protected void shutdown() throws Exception {
+        try {
+            if(kafkaServer != null) {
+                kafkaServer.shutdown();
+            }
+        } finally {
+            if(zkServer != null) {
+                zkServer.shutdown();
+            }
+        }
+    }
+
+    /**
+     * @return A new Properties object containing the correct value of
+     *         {@link CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}, for
+     *         connecting to this instance.
+     */
+    public Properties createBootstrapServerConfig() {
+        final Properties config = new Properties();
+        config.setProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, BROKERHOST + ":" + brokerPort);
+        return config;
+    }
+
+    /**
+     *
+     * @return The host of the Kafka Broker.
+     */
+    public String getBrokerHost() {
+        return BROKERHOST;
+    }
+
+    /**
+     *
+     * @return The port of the Kafka Broker.
+     */
+    public String getBrokerPort() {
+        return brokerPort;
+    }
+
+    /**
+     *
+     * @return The Zookeeper Connect String.
+     */
+    public String getZookeeperConnect() {
+        return zookeeperConnect;
+    }
+
+    /**
+     *
+     * @return A unique Kafka topic name for this instance.
+     */
+    public String getUniqueTopicName() {
+        return "topic_" + KAFKA_TOPIC_COUNTER.getAndIncrement() + "_";
+    }
+}
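
For reference, a minimal sketch of how a test might connect to this embedded
broker through the singleton described below, assuming the classes in this
org.apache.rya.test.kafka package are on the test classpath; the topic name,
key, and value are illustrative, and the broker's default topic auto-creation
is assumed to be enabled:

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.apache.kafka.common.serialization.StringSerializer;

    public class ProducerSketch {
        public void sendOne() {
            // Both servers bind random free ports, so always derive the
            // connection details from the running instance.
            final EmbeddedKafkaInstance kafka = EmbeddedKafkaSingleton.getInstance();

            final Properties props = kafka.createBootstrapServerConfig();
            props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
            props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

            // Send a single record; try-with-resources closes the producer.
            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                producer.send(new ProducerRecord<>("demo-topic", "key", "value"));
            }
        }
    }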

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaSingleton.java
----------------------------------------------------------------------
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaSingleton.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaSingleton.java
new file mode 100644
index 0000000..3a930ee
--- /dev/null
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/EmbeddedKafkaSingleton.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.test.kafka;
+
+import java.io.IOException;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Provides a singleton instance of an {@link EmbeddedKafkaInstance} and
+ * includes a shutdown hook to ensure any open resources are closed on JVM exit.
+ * <p>
+ * This class is derived from MiniAccumuloSingleton.
+ */
+public class EmbeddedKafkaSingleton {
+
+    public static EmbeddedKafkaInstance getInstance() {
+        return InstanceHolder.SINGLETON.instance;
+    }
+
+    private EmbeddedKafkaSingleton() {
+        // hiding implicit default constructor
+    }
+
+    private enum InstanceHolder {
+
+        SINGLETON;
+
+        private final Logger log;
+        private final EmbeddedKafkaInstance instance;
+
+        InstanceHolder() {
+            this.log = LoggerFactory.getLogger(EmbeddedKafkaInstance.class);
+            this.instance = new EmbeddedKafkaInstance();
+            try {
+                this.instance.startup();
+
+                // JUnit does not have an overall lifecycle event for tearing down
+                // this kind of resource, but shutdown hooks work alright in practice
+                // since this should only be used during testing
+
+                // The only other alternative for lifecycle management is to use a
+                // suite lifecycle to enclose the tests that need this resource.
+                // In practice this becomes unwieldy.
+
+                Runtime.getRuntime().addShutdownHook(new Thread() {
+                    @Override
+                    public void run() {
+                        try {
+                            InstanceHolder.this.instance.shutdown();
+                        } catch (final Throwable t) {
+                            // logging frameworks will likely be shut down
+                            t.printStackTrace(System.err);
+                        }
+                    }
+                });
+
+            } catch (final InterruptedException e) {
+                Thread.currentThread().interrupt();
+                log.error("Interrupted while starting EmbeddedKafkaInstance", e);
+            } catch (final IOException e) {
+                log.error("Unexpected error while starting EmbeddedKafkaInstance", e);
+            } catch (final Throwable e) {
+                // catching throwable because failure to construct an enum
+                // instance will lead to another error being thrown downstream
+                log.error("Unexpected throwable while starting EmbeddedKafkaInstance", e);
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaITBase.java
----------------------------------------------------------------------
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaITBase.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaITBase.java
new file mode 100644
index 0000000..ddafbcb
--- /dev/null
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaITBase.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.test.kafka;
+
+import java.util.Properties;
+
+/**
+ * A class intended to be extended for Kafka Integration tests.
+ */
+public class KafkaITBase {
+
+    private static final EmbeddedKafkaInstance embeddedKafka = EmbeddedKafkaSingleton.getInstance();
+
+    /**
+     * @return A new Properties object containing the correct value for Kafka's
+     *         {@link org.apache.kafka.clients.CommonClientConfigs#BOOTSTRAP_SERVERS_CONFIG}.
+     */
+    protected Properties createBootstrapServerConfig() {
+        return embeddedKafka.createBootstrapServerConfig();
+    }
+
+}
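
A minimal sketch of a test that extends this base class; the test class name
and assertion are hypothetical:

    import java.util.Properties;

    import org.apache.kafka.clients.CommonClientConfigs;
    import org.junit.Assert;
    import org.junit.Test;

    public class BootstrapConfigIT extends KafkaITBase {

        @Test
        public void bootstrapConfigIsPopulated() {
            final Properties props = createBootstrapServerConfig();
            // The singleton's broker port is random, so only verify the key is set.
            Assert.assertNotNull(props.getProperty(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG));
        }
    }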

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaTestInstanceRule.java
----------------------------------------------------------------------
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaTestInstanceRule.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaTestInstanceRule.java
new file mode 100644
index 0000000..5fe3c88
--- /dev/null
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/KafkaTestInstanceRule.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.test.kafka;
+
+import java.util.Properties;
+
+import org.I0Itec.zkclient.ZkClient;
+import org.junit.rules.ExternalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import kafka.admin.AdminUtils;
+import kafka.admin.RackAwareMode;
+import kafka.utils.ZKStringSerializer$;
+import kafka.utils.ZkUtils;
+
+
+/**
+ * Provides a JUnit Rule for interacting with the {@link EmbeddedKafkaSingleton}.
+ *
+ */
+public class KafkaTestInstanceRule extends ExternalResource {
+    private static final Logger logger = LoggerFactory.getLogger(KafkaTestInstanceRule.class);
+    private static final EmbeddedKafkaInstance kafkaInstance = EmbeddedKafkaSingleton.getInstance();
+    private String kafkaTopicName;
+    private final boolean createTopic;
+
+    /**
+     * @param createTopic - If true, a topic shall be created for the value
+     *            provided by {@link #getKafkaTopicName()}. If false, no topics
+     *            shall be created.
+     */
+    public KafkaTestInstanceRule(final boolean createTopic) {
+        this.createTopic = createTopic;
+    }
+
+    /**
+     * @return A unique topic name for this test execution. If multiple topics are required by a test, use this value as
+     *         a prefix.
+     */
+    public String getKafkaTopicName() {
+        if (kafkaTopicName == null) {
+            throw new IllegalStateException("Cannot get Kafka Topic Name outside of a test execution.");
+        }
+        return kafkaTopicName;
+    }
+
+    @Override
+    protected void before() throws Throwable {
+        // Get the next kafka topic name.
+        kafkaTopicName = kafkaInstance.getUniqueTopicName();
+
+        if(createTopic) {
+            createTopic(kafkaTopicName);
+        }
+    }
+
+    @Override
+    protected void after() {
+        kafkaTopicName = null;
+    }
+
+    /**
+     * Utility method to provide additional unique topics if they are required.
+     * @param topicName - The Kafka topic to create.
+     */
+    public void createTopic(final String topicName) {
+        // Setup Kafka.
+        ZkUtils zkUtils = null;
+        try {
+            logger.info("Creating Kafka Topic: '{}'", topicName);
+            zkUtils = ZkUtils.apply(new ZkClient(kafkaInstance.getZookeeperConnect(), 30000, 30000, ZKStringSerializer$.MODULE$), false);
+            AdminUtils.createTopic(zkUtils, topicName, 1, 1, new Properties(), RackAwareMode.Disabled$.MODULE$);
+        }
+        finally {
+            if(zkUtils != null) {
+                zkUtils.close();
+            }
+        }
+    }
+
+}
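
A sketch of how this rule might be combined with KafkaITBase in a consumer
test; the group id, offset reset policy, and poll timeout are illustrative:

    import java.util.Collections;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.junit.Rule;
    import org.junit.Test;

    public class TopicConsumerIT extends KafkaITBase {

        // Creates a fresh, uniquely named topic before each test method.
        @Rule
        public KafkaTestInstanceRule kafkaRule = new KafkaTestInstanceRule(true);

        @Test
        public void consumeFromFreshTopic() {
            final Properties props = createBootstrapServerConfig();
            props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
            props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList(kafkaRule.getKafkaTopicName()));
                // Records would be written to the topic by the code under test.
                final ConsumerRecords<String, String> records = consumer.poll(3000L);
                // ... assertions against the consumed records go here ...
            }
        }
    }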

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/4089e706/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
----------------------------------------------------------------------
diff --git a/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java b/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
new file mode 100644
index 0000000..7dad966
--- /dev/null
+++ b/test/kafka/src/main/java/org/apache/rya/test/kafka/PortUtils.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.rya.test.kafka;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.util.Random;
+
+public class PortUtils {
+
+    private PortUtils() {}
+
+    public static int getRandomFreePort() {
+        final Random r = new Random();
+        int count = 0;
+
+        while (count < 13) {
+            final int port = r.nextInt((1 << 16) - 1024) + 1024;
+
+            try (ServerSocket so = new ServerSocket(port)) {
+                so.setReuseAddress(true);
+                return port;
+            } catch (final IOException e) {
+                // ignore
+            }
+
+            count++;
+        }
+
+        throw new RuntimeException("Unable to find a free port after 13 attempts");
+    }
+}
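
As a usage note, the returned port is only guaranteed free at the moment it
was probed, so callers should bind it promptly; another process could claim
it in the intervening window. A minimal sketch, mirroring how
EmbeddedKafkaInstance above feeds the port into its broker configuration (the
"port" key is shown here purely for illustration):

    import java.util.Properties;

    public class PortSketch {
        public static Properties randomPortConfig() {
            final Properties props = new Properties();
            // Bind this port as soon as possible after it is returned.
            props.setProperty("port", Integer.toString(PortUtils.getRandomFreePort()));
            return props;
        }
    }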