Posted to commits@accumulo.apache.org by ec...@apache.org on 2013/11/26 16:47:53 UTC

[04/39] ACCUMULO-600 removed wikisearch from trunk

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeBuilder.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeBuilder.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeBuilder.java
deleted file mode 100644
index 58a3508..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeBuilder.java
+++ /dev/null
@@ -1,675 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.parser;
-
-import java.io.StringReader;
-
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.EvaluationContext;
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.FunctionResult;
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.LiteralResult;
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.ObjectHolder;
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.QueryTerm;
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.TermResult;
-import org.apache.commons.jexl2.parser.ASTAdditiveNode;
-import org.apache.commons.jexl2.parser.ASTAdditiveOperator;
-import org.apache.commons.jexl2.parser.ASTAmbiguous;
-import org.apache.commons.jexl2.parser.ASTAndNode;
-import org.apache.commons.jexl2.parser.ASTArrayAccess;
-import org.apache.commons.jexl2.parser.ASTArrayLiteral;
-import org.apache.commons.jexl2.parser.ASTAssignment;
-import org.apache.commons.jexl2.parser.ASTBitwiseAndNode;
-import org.apache.commons.jexl2.parser.ASTBitwiseComplNode;
-import org.apache.commons.jexl2.parser.ASTBitwiseOrNode;
-import org.apache.commons.jexl2.parser.ASTBitwiseXorNode;
-import org.apache.commons.jexl2.parser.ASTBlock;
-import org.apache.commons.jexl2.parser.ASTConstructorNode;
-import org.apache.commons.jexl2.parser.ASTDivNode;
-import org.apache.commons.jexl2.parser.ASTEQNode;
-import org.apache.commons.jexl2.parser.ASTERNode;
-import org.apache.commons.jexl2.parser.ASTEmptyFunction;
-import org.apache.commons.jexl2.parser.ASTFalseNode;
-import org.apache.commons.jexl2.parser.ASTFloatLiteral;
-import org.apache.commons.jexl2.parser.ASTForeachStatement;
-import org.apache.commons.jexl2.parser.ASTFunctionNode;
-import org.apache.commons.jexl2.parser.ASTGENode;
-import org.apache.commons.jexl2.parser.ASTGTNode;
-import org.apache.commons.jexl2.parser.ASTIdentifier;
-import org.apache.commons.jexl2.parser.ASTIfStatement;
-import org.apache.commons.jexl2.parser.ASTIntegerLiteral;
-import org.apache.commons.jexl2.parser.ASTJexlScript;
-import org.apache.commons.jexl2.parser.ASTLENode;
-import org.apache.commons.jexl2.parser.ASTLTNode;
-import org.apache.commons.jexl2.parser.ASTMapEntry;
-import org.apache.commons.jexl2.parser.ASTMapLiteral;
-import org.apache.commons.jexl2.parser.ASTMethodNode;
-import org.apache.commons.jexl2.parser.ASTModNode;
-import org.apache.commons.jexl2.parser.ASTMulNode;
-import org.apache.commons.jexl2.parser.ASTNENode;
-import org.apache.commons.jexl2.parser.ASTNRNode;
-import org.apache.commons.jexl2.parser.ASTNotNode;
-import org.apache.commons.jexl2.parser.ASTNullLiteral;
-import org.apache.commons.jexl2.parser.ASTOrNode;
-import org.apache.commons.jexl2.parser.ASTReference;
-import org.apache.commons.jexl2.parser.ASTSizeFunction;
-import org.apache.commons.jexl2.parser.ASTSizeMethod;
-import org.apache.commons.jexl2.parser.ASTStringLiteral;
-import org.apache.commons.jexl2.parser.ASTTernaryNode;
-import org.apache.commons.jexl2.parser.ASTTrueNode;
-import org.apache.commons.jexl2.parser.ASTUnaryMinusNode;
-import org.apache.commons.jexl2.parser.ASTWhileStatement;
-import org.apache.commons.jexl2.parser.JexlNode;
-import org.apache.commons.jexl2.parser.ParseException;
-import org.apache.commons.jexl2.parser.Parser;
-import org.apache.commons.jexl2.parser.ParserVisitor;
-import org.apache.commons.jexl2.parser.SimpleNode;
-
-
-import com.google.common.collect.Multimap;
-
-/**
- * Parses a query and returns a tree of TreeNodes. Clauses that sit beneath same-type conjunctions (AND, OR) are rolled up into a single node for the
- * purpose of creating intersecting iterators.
- * 
- */
-public class TreeBuilder implements ParserVisitor {
-  
-  class RootNode extends JexlNode {
-    
-    public RootNode(int id) {
-      super(id);
-    }
-    
-    public RootNode(Parser p, int id) {
-      super(p, id);
-    }
-    
-  }
-  
-  private TreeNode rootNode = null;
-  private TreeNode currentNode = null;
-  private boolean currentlyInCheckChildren = false;
-  
-  public TreeBuilder(String query) throws ParseException {
-    Parser p = new Parser(new StringReader(";"));
-    ASTJexlScript script = p.parse(new StringReader(query), null);
-    // Check to see if the child node is an AND or OR. If not, then
-    // there must be just a single value in the query expression
-    rootNode = new TreeNode();
-    rootNode.setType(RootNode.class);
-    currentNode = rootNode;
-    EvaluationContext ctx = new EvaluationContext();
-    script.childrenAccept(this, ctx);
-  }
-  
-  public TreeBuilder(ASTJexlScript script) {
-    // Check to see if the child node is an AND or OR. If not, then
-    // there must be just a single value in the query expression
-    rootNode = new TreeNode();
-    rootNode.setType(RootNode.class);
-    currentNode = rootNode;
-    EvaluationContext ctx = new EvaluationContext();
-    script.childrenAccept(this, ctx);
-  }
-  
-  public TreeNode getRootNode() {
-    return this.rootNode;
-  }
-  
-  public Object visit(SimpleNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTJexlScript node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTBlock node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTAmbiguous node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTIfStatement node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTWhileStatement node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTForeachStatement node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTAssignment node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTTernaryNode node, Object data) {
-    return null;
-  }
-  
-  /**
-   * @param node
-   * @param failClass
-   * @return false if any node in the subtree is of the fail class or is a NOT node, true otherwise
-   */
-  private boolean nodeCheck(JexlNode node, Class<?> failClass) {
-    if (node.getClass().equals(failClass) || node.getClass().equals(ASTNotNode.class))
-      return false;
-    else {
-      for (int i = 0; i < node.jjtGetNumChildren(); i++) {
-        if (!nodeCheck(node.jjtGetChild(i), failClass))
-          return false;
-      }
-    }
-    return true;
-  }
-  
-  /**
-   * Checks whether all of the child nodes are of the same type (AND/OR) and, if so, aggregates all of the child terms. If not, returns null.
-   * 
-   * @param parent
-   * @param ctx
-   * @return multimap of field names to query terms, or null if no rollup was possible
-   */
-  private Multimap<String,QueryTerm> checkChildren(JexlNode parent, EvaluationContext ctx) {
-    // If the current node is an AND, then make sure that there is no
-    // OR descendant node, and vice versa. If that holds, then we roll
-    // up all of the descendant values.
-    this.currentlyInCheckChildren = true;
-    Multimap<String,QueryTerm> rolledUpTerms = null;
-    boolean result = false;
-    if (parent.getClass().equals(ASTOrNode.class)) {
-      for (int i = 0; i < parent.jjtGetNumChildren(); i++) {
-        result = nodeCheck(parent.jjtGetChild(i), ASTAndNode.class);
-        if (!result)
-          break;
-      }
-    } else {
-      for (int i = 0; i < parent.jjtGetNumChildren(); i++) {
-        result = nodeCheck(parent.jjtGetChild(i), ASTOrNode.class);
-        if (!result)
-          break;
-      }
-    }
-    if (result) {
-      // Set current node to a fake node and
-      // roll up the children from this node using the visitor pattern.
-      TreeNode rollupFakeNode = new TreeNode();
-      TreeNode previous = this.currentNode;
-      this.currentNode = rollupFakeNode;
-      // Run the visitor with the fake node.
-      parent.childrenAccept(this, ctx);
-      // Get the terms from the fake node
-      rolledUpTerms = this.currentNode.getTerms();
-      // Reset the current node pointer
-      this.currentNode = previous;
-    }
-    this.currentlyInCheckChildren = false;
-    return rolledUpTerms;
-  }
-  
-  public Object visit(ASTOrNode node, Object data) {
-    boolean previouslyInOrContext = false;
-    EvaluationContext ctx = null;
-    if (null != data && data instanceof EvaluationContext) {
-      ctx = (EvaluationContext) data;
-      previouslyInOrContext = ctx.inOrContext;
-    } else {
-      ctx = new EvaluationContext();
-    }
-    ctx.inOrContext = true;
-    // Are we being called from the checkChildren method? If so, then we
-    // are rolling up terms. If not, then we need to call check children.
-    if (currentlyInCheckChildren) {
-      // Process both sides of this node.
-      node.jjtGetChild(0).jjtAccept(this, data);
-      node.jjtGetChild(1).jjtAccept(this, data);
-    } else {
-      // Create a new OR node under the current node.
-      TreeNode orNode = new TreeNode();
-      orNode.setType(ASTOrNode.class);
-      orNode.setParent(this.currentNode);
-      this.currentNode.getChildren().add(orNode);
-      Multimap<String,QueryTerm> terms = checkChildren(node, ctx);
-      if (terms == null) {
-        // Then there was no rollup, set the current node to the orNode
-        // and process the children. Be sure to set the current node to
-        // the or node in between calls because we could be processing
-        // an AND node below and the current node will have been switched.
-        // Process both sides of this node.
-        currentNode = orNode;
-        node.jjtGetChild(0).jjtAccept(this, data);
-        currentNode = orNode;
-        node.jjtGetChild(1).jjtAccept(this, data);
-      } else {
-        // There was a rollup; don't process the children, just set the terms
-        // on the or node.
-        orNode.setTerms(terms);
-      }
-    }
-    // reset the state
-    if (null != data && !previouslyInOrContext)
-      ctx.inOrContext = false;
-    return null;
-  }
-  
-  public Object visit(ASTAndNode node, Object data) {
-    boolean previouslyInAndContext = false;
-    EvaluationContext ctx = null;
-    if (null != data && data instanceof EvaluationContext) {
-      ctx = (EvaluationContext) data;
-      previouslyInAndContext = ctx.inAndContext;
-    } else {
-      ctx = new EvaluationContext();
-    }
-    ctx.inAndContext = true;
-    // Are we being called from the checkChildren method? If so, then we
-    // are rolling up terms. If not, then we need to call check children.
-    if (currentlyInCheckChildren) {
-      // Process both sides of this node.
-      node.jjtGetChild(0).jjtAccept(this, data);
-      node.jjtGetChild(1).jjtAccept(this, data);
-    } else {
-      // Create a new And node under the current node.
-      TreeNode andNode = new TreeNode();
-      andNode.setType(ASTAndNode.class);
-      andNode.setParent(this.currentNode);
-      this.currentNode.getChildren().add(andNode);
-      Multimap<String,QueryTerm> terms = checkChildren(node, ctx);
-      if (terms == null) {
-        // Then there was no rollup, set the current node to the andNode
-        // and process the children. Be sure to set the current node to
-        // the and node in between calls because we could be processing
-        // an OR node below and the current node will have been switched.
-        // Process both sides of this node.
-        currentNode = andNode;
-        node.jjtGetChild(0).jjtAccept(this, data);
-        currentNode = andNode;
-        node.jjtGetChild(1).jjtAccept(this, data);
-      } else {
-        // There was a rollup; don't process the children, just set the terms
-        // on the and node.
-        andNode.setTerms(terms);
-      }
-    }
-    if (null != data && !previouslyInAndContext)
-      ctx.inAndContext = false;
-    return null;
-  }
-  
-  public Object visit(ASTBitwiseOrNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTBitwiseXorNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTBitwiseAndNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTEQNode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTNENode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = true;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTLTNode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTGTNode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTLENode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTGENode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTERNode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = false;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, JexlOperatorConstants.getOperator(node.getClass()), value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTNRNode node, Object data) {
-    StringBuilder fieldName = new StringBuilder();
-    ObjectHolder value = new ObjectHolder();
-    // Process both sides of this node.
-    Object left = node.jjtGetChild(0).jjtAccept(this, data);
-    Object right = node.jjtGetChild(1).jjtAccept(this, data);
-    // Ignore functions in the query
-    if (left instanceof FunctionResult || right instanceof FunctionResult)
-      return null;
-    decodeResults(left, right, fieldName, value);
-    // We need to check to see if we are in a NOT context. If so,
-    // then we need to reverse the negation.
-    boolean negated = true;
-    if (null != data && data instanceof EvaluationContext) {
-      EvaluationContext ctx = (EvaluationContext) data;
-      if (ctx.inNotContext)
-        negated = !negated;
-    }
-    QueryTerm term = new QueryTerm(negated, "!~", value.getObject());
-    this.currentNode.getTerms().put(fieldName.toString(), term);
-    return null;
-  }
-  
-  public Object visit(ASTAdditiveNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTAdditiveOperator node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTMulNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTDivNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTModNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTUnaryMinusNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTBitwiseComplNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTNotNode node, Object data) {
-    boolean previouslyInNotContext = false;
-    EvaluationContext ctx = null;
-    if (null != data && data instanceof EvaluationContext) {
-      ctx = (EvaluationContext) data;
-      previouslyInNotContext = ctx.inNotContext;
-    } else {
-      ctx = new EvaluationContext();
-    }
-    ctx.inNotContext = true;
-    // Create a new node in the tree to represent the NOT
-    // Create a new And node under the current node.
-    TreeNode notNode = new TreeNode();
-    notNode.setType(ASTNotNode.class);
-    notNode.setParent(this.currentNode);
-    this.currentNode.getChildren().add(notNode);
-    this.currentNode = notNode;
-    // Process both sides of this node.
-    node.jjtGetChild(0).jjtAccept(this, ctx);
-    // reset the state
-    if (null != data && !previouslyInNotContext)
-      ctx.inNotContext = false;
-    return null;
-  }
-  
-  public Object visit(ASTIdentifier node, Object data) {
-    return new TermResult(node.image);
-  }
-  
-  public Object visit(ASTNullLiteral node, Object data) {
-    return new LiteralResult(node.image);
-  }
-  
-  public Object visit(ASTTrueNode node, Object data) {
-    return new LiteralResult(node.image);
-  }
-  
-  public Object visit(ASTFalseNode node, Object data) {
-    return new LiteralResult(node.image);
-  }
-  
-  public Object visit(ASTIntegerLiteral node, Object data) {
-    return new LiteralResult(node.image);
-  }
-  
-  public Object visit(ASTFloatLiteral node, Object data) {
-    return new LiteralResult(node.image);
-  }
-  
-  public Object visit(ASTStringLiteral node, Object data) {
-    return new LiteralResult("'" + node.image + "'");
-  }
-  
-  public Object visit(ASTArrayLiteral node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTMapLiteral node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTMapEntry node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTEmptyFunction node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTSizeFunction node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTFunctionNode node, Object data) {
-    // Child 0 is the prefix, child 1 is the identifier,
-    // and the remaining children are the function arguments.
-    // Process the remaining arguments.
-    FunctionResult fr = new FunctionResult();
-    int argc = node.jjtGetNumChildren() - 2;
-    for (int i = 0; i < argc; i++) {
-      // Process both sides of this node.
-      Object result = node.jjtGetChild(i + 2).jjtAccept(this, data);
-      if (result instanceof TermResult) {
-        TermResult tr = (TermResult) result;
-        fr.getTerms().add(tr);
-      }
-    }
-    return fr;
-  }
-  
-  public Object visit(ASTMethodNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTSizeMethod node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTConstructorNode node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTArrayAccess node, Object data) {
-    return null;
-  }
-  
-  public Object visit(ASTReference node, Object data) {
-    return node.jjtGetChild(0).jjtAccept(this, data);
-  }
-  
-  private void decodeResults(Object left, Object right, StringBuilder fieldName, ObjectHolder holder) {
-    if (left instanceof TermResult) {
-      TermResult tr = (TermResult) left;
-      fieldName.append((String) tr.value);
-      // Then the right has to be the value
-      if (right instanceof LiteralResult) {
-        holder.setObject(((LiteralResult) right).value);
-      } else {
-        throw new IllegalArgumentException("Object mismatch");
-      }
-    } else if (right instanceof TermResult) {
-      TermResult tr = (TermResult) right;
-      fieldName.append((String) tr.value);
-      if (left instanceof LiteralResult) {
-        holder.setObject(((LiteralResult) left).value);
-      } else {
-        throw new IllegalArgumentException("Object mismatch");
-      }
-      
-    } else {
-      throw new IllegalArgumentException("No Term specified in query");
-    }
-  }
-}
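
For readers skimming the removed parser, a minimal sketch of how TreeBuilder was typically driven. The query string and the class name TreeBuilderSketch are illustrative only and are not part of this commit:

    import org.apache.accumulo.examples.wikisearch.parser.TreeBuilder;
    import org.apache.accumulo.examples.wikisearch.parser.TreeNode;
    import org.apache.commons.jexl2.parser.ParseException;

    public class TreeBuilderSketch {
      public static void main(String[] args) throws ParseException {
        // Illustrative JEXL query; both terms sit under a single AND.
        String query = "TITLE == 'apache' and TEXT == 'accumulo'";
        TreeBuilder builder = new TreeBuilder(query);
        TreeNode root = builder.getRootNode();
        // No OR appears below the AND, so checkChildren() rolls both terms up into
        // one ASTAndNode TreeNode whose terms multimap maps TITLE and TEXT to
        // their QueryTerm values.
        for (TreeNode child : root.getChildren()) {
          System.out.println(child); // e.g. "Type: ASTAndNode Terms: {TITLE=[...], TEXT=[...]}"
        }
      }
    }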

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeNode.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeNode.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeNode.java
deleted file mode 100644
index 57ea369..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/parser/TreeNode.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.parser;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Enumeration;
-import java.util.List;
-import java.util.NoSuchElementException;
-import java.util.Vector;
-
-import org.apache.accumulo.examples.wikisearch.parser.QueryParser.QueryTerm;
-import org.apache.commons.jexl2.parser.JexlNode;
-
-
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
-
-public class TreeNode {
-  
-  private Class<? extends JexlNode> type = null;
-  /* navigation elements */
-  private TreeNode parent = null;
-  private List<TreeNode> children = new ArrayList<TreeNode>();
-  private Multimap<String,QueryTerm> terms = HashMultimap.create();
-  
-  public TreeNode() {
-    super();
-  }
-  
-  public Class<? extends JexlNode> getType() {
-    return type;
-  }
-  
-  public TreeNode getParent() {
-    return parent;
-  }
-  
-  public List<TreeNode> getChildren() {
-    return children;
-  }
-  
-  public Enumeration<TreeNode> getChildrenAsEnumeration() {
-    return Collections.enumeration(children);
-  }
-  
-  public Multimap<String,QueryTerm> getTerms() {
-    return terms;
-  }
-  
-  public void setType(Class<? extends JexlNode> type) {
-    this.type = type;
-  }
-  
-  public void setParent(TreeNode parent) {
-    this.parent = parent;
-  }
-  
-  public void setChildren(List<TreeNode> children) {
-    this.children = children;
-  }
-  
-  public void setTerms(Multimap<String,QueryTerm> terms) {
-    this.terms = terms;
-  }
-  
-  public boolean isLeaf() {
-    return children.isEmpty();
-  }
-  
-  @Override
-  public String toString() {
-    StringBuilder buf = new StringBuilder();
-    buf.append("Type: ").append(type.getSimpleName());
-    buf.append(" Terms: ");
-    if (null == terms) {
-      buf.append("null");
-    } else {
-      buf.append(terms.toString());
-    }
-    return buf.toString();
-  }
-  
-  public final Enumeration<?> depthFirstEnumeration() {
-    return new PostorderEnumeration(this);
-  }
-  
-  public Enumeration<?> breadthFirstEnumeration() {
-    return new BreadthFirstEnumeration(this);
-  }
-  
-  public final class PostorderEnumeration implements Enumeration<TreeNode> {
-    
-    protected TreeNode root;
-    protected Enumeration<TreeNode> children;
-    protected Enumeration<TreeNode> subtree;
-    
-    public PostorderEnumeration(TreeNode rootNode) {
-      super();
-      root = rootNode;
-      children = root.getChildrenAsEnumeration();
-      subtree = EMPTY_ENUMERATION;
-    }
-    
-    public boolean hasMoreElements() {
-      return root != null;
-    }
-    
-    public TreeNode nextElement() {
-      TreeNode retval;
-      
-      if (subtree.hasMoreElements()) {
-        retval = subtree.nextElement();
-      } else if (children.hasMoreElements()) {
-        subtree = new PostorderEnumeration((TreeNode) children.nextElement());
-        retval = subtree.nextElement();
-      } else {
-        retval = root;
-        root = null;
-      }
-      
-      return retval;
-    }
-  } // End of class PostorderEnumeration
-  
-  public static final Enumeration<TreeNode> EMPTY_ENUMERATION = new Enumeration<TreeNode>() {
-    
-    public boolean hasMoreElements() {
-      return false;
-    }
-    
-    public TreeNode nextElement() {
-      throw new NoSuchElementException("No more elements");
-    }
-  };
-  
-  final class BreadthFirstEnumeration implements Enumeration<TreeNode> {
-    protected Queue queue;
-    
-    public BreadthFirstEnumeration(TreeNode rootNode) {
-      super();
-      Vector<TreeNode> v = new Vector<TreeNode>(1);
-      v.addElement(rootNode); // PENDING: don't really need a vector
-      queue = new Queue();
-      queue.enqueue(v.elements());
-    }
-    
-    public boolean hasMoreElements() {
-      return (!queue.isEmpty() && ((Enumeration<?>) queue.firstObject()).hasMoreElements());
-    }
-    
-    public TreeNode nextElement() {
-      Enumeration<?> enumer = (Enumeration<?>) queue.firstObject();
-      TreeNode node = (TreeNode) enumer.nextElement();
-      Enumeration<?> children = node.getChildrenAsEnumeration();
-      
-      if (!enumer.hasMoreElements()) {
-        queue.dequeue();
-      }
-      if (children.hasMoreElements()) {
-        queue.enqueue(children);
-      }
-      return node;
-    }
-    
-    // A simple queue with a linked list data structure.
-    final class Queue {
-      QNode head; // null if empty
-      QNode tail;
-      
-      final class QNode {
-        public Object object;
-        public QNode next; // null if end
-        
-        public QNode(Object object, QNode next) {
-          this.object = object;
-          this.next = next;
-        }
-      }
-      
-      public void enqueue(Object anObject) {
-        if (head == null) {
-          head = tail = new QNode(anObject, null);
-        } else {
-          tail.next = new QNode(anObject, null);
-          tail = tail.next;
-        }
-      }
-      
-      public Object dequeue() {
-        if (head == null) {
-          throw new NoSuchElementException("No more elements");
-        }
-        
-        Object retval = head.object;
-        QNode oldHead = head;
-        head = head.next;
-        if (head == null) {
-          tail = null;
-        } else {
-          oldHead.next = null;
-        }
-        return retval;
-      }
-      
-      public Object firstObject() {
-        if (head == null) {
-          throw new NoSuchElementException("No more elements");
-        }
-        
-        return head.object;
-      }
-      
-      public boolean isEmpty() {
-        return head == null;
-      }
-      
-    } // End of class Queue
-    
-  } // End of class BreadthFirstEnumeration
-}
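
A brief sketch of the two traversal orders TreeNode exposes; the helper class name is illustrative, and root would typically come from TreeBuilder.getRootNode():

    import java.util.Enumeration;

    import org.apache.accumulo.examples.wikisearch.parser.TreeNode;

    public class TreeNodeWalkSketch {
      // Prints every node reachable from root in breadth-first order;
      // TreeNode.toString() reports the node type and its terms multimap.
      public static void printBreadthFirst(TreeNode root) {
        Enumeration<?> nodes = root.breadthFirstEnumeration();
        while (nodes.hasMoreElements()) {
          TreeNode node = (TreeNode) nodes.nextElement();
          System.out.println((node.isLeaf() ? "leaf:   " : "branch: ") + node);
        }
      }

      // depthFirstEnumeration() yields the same nodes in postorder,
      // i.e. children are returned before their parent.
      public static void printPostorder(TreeNode root) {
        Enumeration<?> nodes = root.depthFirstEnumeration();
        while (nodes.hasMoreElements()) {
          System.out.println(nodes.nextElement());
        }
      }
    }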

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/IQuery.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/IQuery.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/IQuery.java
deleted file mode 100644
index 9f1a8ed..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/IQuery.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.query;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-
-import org.apache.accumulo.examples.wikisearch.sample.Results;
-
-
-@Path("/Query")
-public interface IQuery {
-  
-  @GET
-  @POST
-  @Path("/html")
-  @Consumes("*/*")
-  public String html(@QueryParam("query") String query, @QueryParam("auths") String auths);
-  
-  @GET
-  @POST
-  @Path("/xml")
-  @Consumes("*/*")
-  @Produces("application/xml")
-  public Results xml(@QueryParam("query") String query, @QueryParam("auths") String auths);
-  
-  @GET
-  @POST
-  @Path("/json")
-  @Consumes("*/*")
-  @Produces("application/json")
-  public Results json(@QueryParam("query") String query, @QueryParam("auths") String auths);
-  
-  @GET
-  @POST
-  @Path("/yaml")
-  @Consumes("*/*")
-  @Produces("text/x-yaml")
-  public Results yaml(@QueryParam("query") String query, @QueryParam("auths") String auths);
-  
-  @GET
-  @POST
-  @Path("/content")
-  @Consumes("*/*")
-  @Produces("application/xml")
-  public Results content(@QueryParam("query") String query, @QueryParam("auths") String auths);
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/Query.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/Query.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/Query.java
deleted file mode 100644
index d7dab3a..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/query/Query.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.query;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.annotation.PostConstruct;
-import javax.annotation.PreDestroy;
-import javax.annotation.Resource;
-import javax.ejb.EJBException;
-import javax.ejb.Local;
-import javax.ejb.Stateless;
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.Marshaller;
-import javax.xml.transform.Templates;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.stream.StreamResult;
-import javax.xml.transform.stream.StreamSource;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Instance;
-import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.accumulo.examples.wikisearch.logic.ContentLogic;
-import org.apache.accumulo.examples.wikisearch.logic.QueryLogic;
-import org.apache.accumulo.examples.wikisearch.sample.Results;
-import org.apache.log4j.Logger;
-
-@Stateless
-@Local(IQuery.class)
-public class Query implements IQuery {
-  
-  private static final Logger log = Logger.getLogger(Query.class);
-  
-  // Inject values from XML configuration file
-  @Resource(name = "instanceName")
-  private String instanceName;
-  
-  @Resource(name = "zooKeepers")
-  private String zooKeepers;
-  
-  @Resource(name = "username")
-  private String username;
-  
-  @Resource(name = "password")
-  private String password;
-  
-  @Resource(name = "tableName")
-  private String tableName;
-  
-  @Resource(name = "threads")
-  private int threads;
-  
-  private static final String XSL = "/accumulo-wikisearch/style.xsl";
-  
-  @PostConstruct
-  public void init() {
-    log.info("Post Construct");
-  }
-  
-  @PreDestroy
-  public void close() {
-    log.info("Close called.");
-  }
-  
-  /*
-   * (non-Javadoc)
-   * 
-   * @see sample.query.IQuery#html(java.lang.String, java.lang.String)
-   */
-  public String html(String query, String auths) {
-    log.info("HTML query: " + query);
-    URL u;
-    try {
-      u = new URL("http://" + System.getProperty("jboss.bind.address") + ":" + System.getProperty("jboss.web.http.port") + XSL);
-    } catch (MalformedURLException e1) {
-      throw new EJBException("Unable to load XSL stylesheet", e1);
-    }
-    InputStream xslContent;
-    try {
-      xslContent = u.openStream();
-    } catch (IOException e1) {
-      throw new EJBException("Unable to get xsl content", e1);
-    }
-    
-    StringWriter xml = new StringWriter();
-    StringWriter html = new StringWriter();
-    
-    Results results = query(query, auths);
-    try {
-      // Marshall the query results object
-      JAXBContext ctx = JAXBContext.newInstance(Results.class);
-      Marshaller m = ctx.createMarshaller();
-      m.marshal(results, xml);
-      
-      // Perform XSL transform on the xml.
-      StringReader reader = new StringReader(xml.toString());
-      TransformerFactory tf = TransformerFactory.newInstance();
-      // Create the transformer from the xsl
-      Templates xsl = tf.newTemplates(new StreamSource(xslContent));
-      Transformer t = xsl.newTransformer();
-      t.transform(new StreamSource(reader), new StreamResult(html));
-      
-    } catch (Exception e) {
-      throw new EJBException("Error processing query results", e);
-    } finally {
-      try {
-        xslContent.close();
-      } catch (IOException e) {
-        throw new EJBException("Unable to close input stream", e);
-      }
-    }
-    return html.toString();
-  }
-  
-  /*
-   * (non-Javadoc)
-   * 
-   * @see sample.query.IQuery#xml(java.lang.String, java.lang.String)
-   */
-  public Results xml(String query, String auths) {
-    log.info("XML query: " + query);
-    return query(query, auths);
-  }
-  
-  /*
-   * (non-Javadoc)
-   * 
-   * @see sample.query.IQuery#json(java.lang.String, java.lang.String)
-   */
-  public Results json(String query, String auths) {
-    log.info("JSON query: " + query);
-    return query(query, auths);
-  }
-  
-  /*
-   * (non-Javadoc)
-   * 
-   * @see sample.query.IQuery#yaml(java.lang.String, java.lang.String)
-   */
-  public Results yaml(String query, String auths) {
-    log.info("YAML query: " + query);
-    return query(query, auths);
-  }
-  
-  /*
-   * (non-Javadoc)
-   * 
-   * @see sample.query.IQuery#content(java.lang.String, java.lang.String)
-   */
-  public Results content(String query, String auths) {
-    log.info("Content query: " + query);
-    Connector connector = null;
-    if (null == instanceName || null == zooKeepers || null == username || null == password)
-      throw new EJBException("Required parameters not set. [instanceName = " + this.instanceName + ", zookeepers = " + this.zooKeepers + ", username = "
-          + this.username + ", password = " + this.password + "]. Check values in ejb-jar.xml");
-    Instance instance = new ZooKeeperInstance(this.instanceName, this.zooKeepers);
-    try {
-      log.info("Connecting to [instanceName = " + this.instanceName + ", zookeepers = " + this.zooKeepers + ", username = " + this.username + ", password = "
-          + this.password + "].");
-      connector = instance.getConnector(this.username, this.password.getBytes());
-    } catch (Exception e) {
-      throw new EJBException("Error getting connector from instance", e);
-    }
-    
-    // Create list of auths
-    List<String> authorizations = new ArrayList<String>();
-    if (auths != null && auths.length() > 0)
-      for (String a : auths.split(","))
-        authorizations.add(a);
-    ContentLogic table = new ContentLogic();
-    table.setTableName(tableName);
-    return table.runQuery(connector, query, authorizations);
-    
-  }
-  
-  /**
-   * Calls the query logic with the given parameters and returns the results.
-   * 
-   * @param query
-   * @param auths
-   * @return the results of the query
-   */
-  public Results query(String query, String auths) {
-    
-    Connector connector = null;
-    if (null == instanceName || null == zooKeepers || null == username || null == password)
-      throw new EJBException("Required parameters not set. [instanceName = " + this.instanceName + ", zookeepers = " + this.zooKeepers + ", username = "
-          + this.username + ", password = " + this.password + "]. Check values in ejb-jar.xml");
-    Instance instance = new ZooKeeperInstance(this.instanceName, this.zooKeepers);
-    try {
-      log.info("Connecting to [instanceName = " + this.instanceName + ", zookeepers = " + this.zooKeepers + ", username = " + this.username + ", password = "
-          + this.password + "].");
-      connector = instance.getConnector(this.username, this.password.getBytes());
-    } catch (Exception e) {
-      throw new EJBException("Error getting connector from instance", e);
-    }
-    
-    // Create list of auths
-    List<String> authorizations = new ArrayList<String>();
-    if (auths != null && auths.length() > 0)
-      for (String a : auths.split(","))
-        authorizations.add(a);
-    
-    QueryLogic table = new QueryLogic();
-    table.setTableName(tableName);
-    table.setMetadataTableName(tableName + "Metadata");
-    table.setIndexTableName(tableName + "Index");
-    table.setReverseIndexTableName(tableName + "ReverseIndex");
-    table.setQueryThreads(threads);
-    table.setUnevaluatedFields("TEXT");
-    table.setUseReadAheadIterator(false);
-    return table.runQuery(connector, authorizations, query, null, null, null);
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Document.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Document.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Document.java
deleted file mode 100644
index a5b6ccf..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Document.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.sample;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-
-@XmlAccessorType(XmlAccessType.FIELD)
-public class Document {
-  
-  @XmlElement
-  private String id = null;
-  
-  @XmlElement
-  private List<Field> field = new ArrayList<Field>();
-  
-  public Document() {
-    super();
-  }
-  
-  public Document(String id, List<Field> fields) {
-    super();
-    this.id = id;
-    this.field = fields;
-  }
-  
-  public String getId() {
-    return id;
-  }
-  
-  public List<Field> getFields() {
-    return field;
-  }
-  
-  public void setId(String id) {
-    this.id = id;
-  }
-  
-  public void setFields(List<Field> fields) {
-    this.field = fields;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Field.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Field.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Field.java
deleted file mode 100644
index 9f904a6..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Field.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.sample;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlAttribute;
-import javax.xml.bind.annotation.XmlValue;
-
-@XmlAccessorType(XmlAccessType.FIELD)
-public class Field {
-  
-  @XmlAttribute
-  private String name = null;
-  @XmlValue
-  private String value = null;
-  
-  public Field() {
-    super();
-  }
-  
-  public Field(String fieldName, String fieldValue) {
-    super();
-    this.name = fieldName;
-    this.value = fieldValue;
-  }
-  
-  public String getFieldName() {
-    return name;
-  }
-  
-  public String getFieldValue() {
-    return value;
-  }
-  
-  public void setFieldName(String fieldName) {
-    this.name = fieldName;
-  }
-  
-  public void setFieldValue(String fieldValue) {
-    this.value = fieldValue;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Results.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Results.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Results.java
deleted file mode 100644
index fa804b4..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/sample/Results.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.sample;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class Results {
-  
-  @XmlElement
-  private List<Document> document = new ArrayList<Document>();
-  
-  public Results() {
-    super();
-  }
-  
-  public List<Document> getResults() {
-    return document;
-  }
-  
-  public void setResults(List<Document> results) {
-    this.document = results;
-  }
-  
-  public int size() {
-    if (null == document)
-      return 0;
-    else
-      return document.size();
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/BaseKeyParser.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/BaseKeyParser.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/BaseKeyParser.java
deleted file mode 100644
index a4b2115..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/BaseKeyParser.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.util;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.accumulo.core.data.Key;
-
-public class BaseKeyParser {
-  public static final String ROW_FIELD = "row";
-  public static final String COLUMN_FAMILY_FIELD = "columnFamily";
-  public static final String COLUMN_QUALIFIER_FIELD = "columnQualifier";
-  
-  protected Map<String,String> keyFields = new HashMap<String,String>();
-  protected Key key = null;
-  
-  /**
-   * Parses a Key object into its constituent fields. This method clears any prior values, so the object can be reused without requiring a new instantiation.
-   * This default implementation makes the row, columnFamily, and columnQualifier available.
-   * 
-   * @param key
-   */
-  public void parse(Key key) {
-    this.key = key;
-    
-    keyFields.clear();
-    
-    keyFields.put(ROW_FIELD, key.getRow().toString());
-    keyFields.put(COLUMN_FAMILY_FIELD, key.getColumnFamily().toString());
-    keyFields.put(COLUMN_QUALIFIER_FIELD, key.getColumnQualifier().toString());
-  }
-  
-  public String getFieldValue(String fieldName) {
-    return keyFields.get(fieldName);
-  }
-  
-  public String[] getFieldNames() {
-    String[] fieldNames = new String[keyFields.size()];
-    return keyFields.keySet().toArray(fieldNames);
-  }
-  
-  public BaseKeyParser duplicate() {
-    return new BaseKeyParser();
-  }
-  
-  public String getRow() {
-    return keyFields.get(ROW_FIELD);
-  }
-  
-  public String getColumnFamily() {
-    return keyFields.get(COLUMN_FAMILY_FIELD);
-  }
-  
-  public String getColumnQualifier() {
-    return keyFields.get(COLUMN_QUALIFIER_FIELD);
-  }
-  
-  public Key getKey() {
-    return this.key;
-  }
-  
-}
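
A minimal sketch of the BaseKeyParser contract shown above: parse() repopulates the field map from a Key, so one parser instance can be reused across keys. The row/family/qualifier values below are made up for illustration:

    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.examples.wikisearch.util.BaseKeyParser;
    import org.apache.hadoop.io.Text;

    public class BaseKeyParserSketch {
      public static void main(String[] args) {
        BaseKeyParser parser = new BaseKeyParser();

        // parse() clears any prior values, so the same instance handles both keys.
        parser.parse(new Key(new Text("row1"), new Text("family1"), new Text("qual1")));
        System.out.println(parser.getRow() + " / " + parser.getColumnFamily() + " / " + parser.getColumnQualifier());

        parser.parse(new Key(new Text("row2"), new Text("family2"), new Text("qual2")));
        System.out.println(parser.getRow() + " / " + parser.getColumnFamily() + " / " + parser.getColumnQualifier());
      }
    }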

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/FieldIndexKeyParser.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/FieldIndexKeyParser.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/FieldIndexKeyParser.java
deleted file mode 100644
index 6fc48cd..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/FieldIndexKeyParser.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.util;
-
-import org.apache.accumulo.core.data.Key;
-
-public class FieldIndexKeyParser extends KeyParser {
-  
-  public static final String DELIMITER = "\0";
-  
-  @Override
-  public void parse(Key key) {
-    super.parse(key);
-    
-    String[] colFamParts = this.keyFields.get(BaseKeyParser.COLUMN_FAMILY_FIELD).split(DELIMITER);
-    this.keyFields.put(FIELDNAME_FIELD, colFamParts.length >= 2 ? colFamParts[1] : "");
-    
-    String[] colQualParts = this.keyFields.get(BaseKeyParser.COLUMN_QUALIFIER_FIELD).split(DELIMITER);
-    this.keyFields.put(SELECTOR_FIELD, colQualParts.length >= 1 ? colQualParts[0] : "");
-    this.keyFields.put(DATATYPE_FIELD, colQualParts.length >= 2 ? colQualParts[1] : "");
-    this.keyFields.put(UID_FIELD, colQualParts.length >= 3 ? colQualParts[2] : "");
-  }
-  
-  @Override
-  public BaseKeyParser duplicate() {
-    return new FieldIndexKeyParser();
-  }
-  
-  @Override
-  public String getSelector() {
-    return keyFields.get(SELECTOR_FIELD);
-  }
-  
-  @Override
-  public String getDataType() {
-    return keyFields.get(DATATYPE_FIELD);
-  }
-  
-  @Override
-  public String getFieldName() {
-    return keyFields.get(FIELDNAME_FIELD);
-  }
-  
-  @Override
-  public String getUid() {
-    return keyFields.get(UID_FIELD);
-  }
-  
-  public String getDataTypeUid() {
-    return getDataType() + DELIMITER + getUid();
-  }
-  
-  // An alias for getSelector
-  public String getFieldValue() {
-    return getSelector();
-  }
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/KeyParser.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/KeyParser.java b/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/KeyParser.java
deleted file mode 100644
index 5648e0e..0000000
--- a/src/examples/wikisearch/query/src/main/java/org/apache/accumulo/examples/wikisearch/util/KeyParser.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.util;
-
-import org.apache.accumulo.core.data.Key;
-
-public class KeyParser extends BaseKeyParser {
-  public static final String SELECTOR_FIELD = "selector";
-  public static final String DATATYPE_FIELD = "dataType";
-  public static final String FIELDNAME_FIELD = "fieldName";
-  public static final String UID_FIELD = "uid";
-  public static final String DELIMITER = "\0";
-  
-  @Override
-  public void parse(Key key) {
-    super.parse(key);
-    
-    String[] colFamParts = this.keyFields.get(BaseKeyParser.COLUMN_FAMILY_FIELD).split(DELIMITER);
-    this.keyFields.put(FIELDNAME_FIELD, colFamParts.length >= 2 ? colFamParts[1] : "");
-    
-    String[] colQualParts = this.keyFields.get(BaseKeyParser.COLUMN_QUALIFIER_FIELD).split(DELIMITER);
-    this.keyFields.put(SELECTOR_FIELD, colQualParts.length >= 1 ? colQualParts[0] : "");
-    this.keyFields.put(DATATYPE_FIELD, colQualParts.length >= 2 ? colQualParts[1] : "");
-    this.keyFields.put(UID_FIELD, colQualParts.length >= 3 ? colQualParts[2] : "");
-  }
-  
-  @Override
-  public BaseKeyParser duplicate() {
-    return new KeyParser();
-  }
-  
-  public String getSelector() {
-    return keyFields.get(SELECTOR_FIELD);
-  }
-  
-  public String getDataType() {
-    return keyFields.get(DATATYPE_FIELD);
-  }
-  
-  public String getFieldName() {
-    return keyFields.get(FIELDNAME_FIELD);
-  }
-  
-  public String getUid() {
-    return keyFields.get(UID_FIELD);
-  }
-  
-  public String getDataTypeUid() {
-    return getDataType() + DELIMITER + getUid();
-  }
-  
-  // An alias for getSelector
-  public String getFieldValue() {
-    return getSelector();
-  }
-}

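For reference, KeyParser above splits the column family and column qualifier on the null-byte DELIMITER: the second column-family part becomes the field name, and the qualifier parts become selector, data type, and UID in that order. A minimal sketch of that behavior; the "fi" column-family prefix and the literal values below are illustrative assumptions, not taken from the removed code:

    import org.apache.accumulo.core.data.Key;
    import org.apache.accumulo.examples.wikisearch.util.KeyParser;
    import org.apache.hadoop.io.Text;

    public class KeyParserSketch {
      public static void main(String[] args) {
        // Column family: prefix \0 fieldName; column qualifier: selector \0 dataType \0 uid.
        Key k = new Key(new Text("row1"),
            new Text("fi\0TITLE"),
            new Text("hello\0enwiki\0doc1"));
        KeyParser parser = new KeyParser();
        parser.parse(k);
        System.out.println(parser.getFieldName());    // TITLE
        System.out.println(parser.getSelector());     // hello
        System.out.println(parser.getDataType());     // enwiki
        System.out.println(parser.getUid());          // doc1
        System.out.println(parser.getDataTypeUid());  // enwiki\0doc1 (null byte between the two parts)
      }
    }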
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/resources/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/resources/META-INF/MANIFEST.MF b/src/examples/wikisearch/query/src/main/resources/META-INF/MANIFEST.MF
deleted file mode 100644
index 59499bc..0000000
--- a/src/examples/wikisearch/query/src/main/resources/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,2 +0,0 @@
-Manifest-Version: 1.0
-

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/main/resources/META-INF/ejb-jar.xml.example
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/main/resources/META-INF/ejb-jar.xml.example b/src/examples/wikisearch/query/src/main/resources/META-INF/ejb-jar.xml.example
deleted file mode 100644
index dbfabae..0000000
--- a/src/examples/wikisearch/query/src/main/resources/META-INF/ejb-jar.xml.example
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<ejb-jar xmlns="http://java.sun.com/xml/ns/javaee"
-  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-  xsi:schemaLocation="http://java.sun.com/xml/ns/javaee
-          http://java.sun.com/xml/ns/javaee/ejb-jar_3_1.xsd" version="3.1">
-  <enterprise-beans>
-    <session>
-      <ejb-name>Query</ejb-name>
-      <env-entry>
-        <env-entry-name>instanceName</env-entry-name>
-        <env-entry-type>java.lang.String</env-entry-type>
-        <env-entry-value><!-- replace me --></env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>zooKeepers</env-entry-name>
-        <env-entry-type>java.lang.String</env-entry-type>
-        <env-entry-value><!-- replace me --></env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>username</env-entry-name>
-        <env-entry-type>java.lang.String</env-entry-type>
-        <env-entry-value><!-- replace me --></env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>password</env-entry-name>
-        <env-entry-type>java.lang.String</env-entry-type>
-        <env-entry-value><!-- replace me --></env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>tableName</env-entry-name>
-        <env-entry-type>java.lang.String</env-entry-type>
-        <env-entry-value>wiki</env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>partitions</env-entry-name>
-        <env-entry-type>java.lang.Integer</env-entry-type>
-        <env-entry-value>100</env-entry-value>
-      </env-entry>
-      <env-entry>
-        <env-entry-name>threads</env-entry-name>
-        <env-entry-type>java.lang.Integer</env-entry-type>
-        <env-entry-value>8</env-entry-value>
-      </env-entry>
-    </session>
-  </enterprise-beans>
-</ejb-jar>

http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java b/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java
deleted file mode 100644
index 35743b3..0000000
--- a/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.logic;
-
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Counters;
-import org.apache.hadoop.mapreduce.StatusReporter;
-
-public class StandaloneStatusReporter extends StatusReporter {
-  
-  private Counters c = new Counters();
-  
-  private long filesProcessed = 0;
-  private long recordsProcessed = 0;
-  
-  public Counters getCounters() {
-    return c;
-  }
-  
-  @Override
-  public Counter getCounter(Enum<?> name) {
-    return c.findCounter(name);
-  }
-  
-  @Override
-  public Counter getCounter(String group, String name) {
-    return c.findCounter(group, name);
-  }
-  
-  @Override
-  public void progress() {
-    // do nothing
-  }
-  
-  @Override
-  public void setStatus(String status) {
-    // do nothing
-  }
-  
-  public long getFilesProcessed() {
-    return filesProcessed;
-  }
-  
-  public long getRecordsProcessed() {
-    return recordsProcessed;
-  }
-  
-  public void incrementFilesProcessed() {
-    filesProcessed++;
-    recordsProcessed = 0;
-  }
-  
-  public void incrementRecordsProcessed() {
-    recordsProcessed++;
-  }
-}

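For reference, StandaloneStatusReporter above is a no-op StatusReporter backed by an in-memory Counters object, which lets mapper code run outside a live MapReduce job (as the test below does). A minimal sketch of how it could be exercised; the counter group and name are illustrative:

    import org.apache.accumulo.examples.wikisearch.logic.StandaloneStatusReporter;

    public class ReporterSketch {
      public static void main(String[] args) {
        StandaloneStatusReporter reporter = new StandaloneStatusReporter();
        // Counters are created on demand by the backing Counters object.
        reporter.getCounter("wikisearch", "pagesParsed").increment(1);
        reporter.incrementFilesProcessed();   // also resets the per-file record count to 0
        reporter.incrementRecordsProcessed();
        System.out.println(reporter.getFilesProcessed());    // 1
        System.out.println(reporter.getRecordsProcessed());  // 1
        System.out.println(reporter.getCounters().findCounter("wikisearch", "pagesParsed").getValue()); // 1
      }
    }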
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8db62992/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
----------------------------------------------------------------------
diff --git a/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java b/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
deleted file mode 100644
index 938f01b..0000000
--- a/src/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.examples.wikisearch.logic;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map.Entry;
-
-import junit.framework.Assert;
-
-import org.apache.accumulo.core.client.BatchWriter;
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.MutationsRejectedException;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.client.mock.MockInstance;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.core.util.ContextFactory;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
-import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
-import org.apache.accumulo.examples.wikisearch.parser.RangeCalculator;
-import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
-import org.apache.accumulo.examples.wikisearch.sample.Document;
-import org.apache.accumulo.examples.wikisearch.sample.Field;
-import org.apache.accumulo.examples.wikisearch.sample.Results;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.mapreduce.OutputCommitter;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.junit.Before;
-import org.junit.Test;
-
-public class TestQueryLogic {
-  
-  private static final String METADATA_TABLE_NAME = "wikiMetadata";
-  
-  private static final String TABLE_NAME = "wiki";
-  
-  private static final String INDEX_TABLE_NAME = "wikiIndex";
-  
-  private static final String RINDEX_TABLE_NAME = "wikiReverseIndex";
-  
-  private static final String TABLE_NAMES[] = {METADATA_TABLE_NAME, TABLE_NAME, RINDEX_TABLE_NAME, INDEX_TABLE_NAME};
-  
-  private class MockAccumuloRecordWriter extends RecordWriter<Text,Mutation> {
-    @Override
-    public void write(Text key, Mutation value) throws IOException, InterruptedException {
-      try {
-        writerMap.get(key).addMutation(value);
-      } catch (MutationsRejectedException e) {
-        throw new IOException("Error adding mutation", e);
-      }
-    }
-    
-    @Override
-    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
-      try {
-        for (BatchWriter w : writerMap.values()) {
-          w.flush();
-          w.close();
-        }
-      } catch (MutationsRejectedException e) {
-        throw new IOException("Error closing Batch Writer", e);
-      }
-    }
-    
-  }
-  
-  private Connector c = null;
-  private Configuration conf = new Configuration();
-  private HashMap<Text,BatchWriter> writerMap = new HashMap<Text,BatchWriter>();
-  private QueryLogic table = null;
-  
-  @Before
-  public void setup() throws Exception {
-    
-    Logger.getLogger(AbstractQueryLogic.class).setLevel(Level.DEBUG);
-    Logger.getLogger(QueryLogic.class).setLevel(Level.DEBUG);
-    Logger.getLogger(RangeCalculator.class).setLevel(Level.DEBUG);
-    
-    conf.set(AggregatingRecordReader.START_TOKEN, "<page>");
-    conf.set(AggregatingRecordReader.END_TOKEN, "</page>");
-    conf.set(WikipediaConfiguration.TABLE_NAME, TABLE_NAME);
-    conf.set(WikipediaConfiguration.NUM_PARTITIONS, "1");
-    conf.set(WikipediaConfiguration.NUM_GROUPS, "1");
-    
-    MockInstance i = new MockInstance();
-    c = i.getConnector("root", "");
-    for (String table : TABLE_NAMES) {
-      try {
-        c.tableOperations().delete(table);
-      } catch (Exception ex) {}
-      c.tableOperations().create(table);
-      writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
-    }
-    
-    TaskAttemptContext context = ContextFactory.createTaskAttemptContext(conf);
-    
-    RawLocalFileSystem fs = new RawLocalFileSystem();
-    fs.setConf(conf);
-    
-    URL url = ClassLoader.getSystemResource("enwiki-20110901-001.xml");
-    Assert.assertNotNull(url);
-    File data = new File(url.toURI());
-    Path tmpFile = new Path(data.getAbsolutePath());
-    
-    // Setup the Mapper
-    WikipediaInputSplit split = new WikipediaInputSplit(new FileSplit(tmpFile, 0, fs.pathToFile(tmpFile).length(), null), 0);
-    AggregatingRecordReader rr = new AggregatingRecordReader();
-    Path ocPath = new Path(tmpFile, "oc");
-    OutputCommitter oc = new FileOutputCommitter(ocPath, context);
-    fs.deleteOnExit(ocPath);
-    StandaloneStatusReporter sr = new StandaloneStatusReporter();
-    rr.initialize(split, context);
-    MockAccumuloRecordWriter rw = new MockAccumuloRecordWriter();
-    WikipediaMapper mapper = new WikipediaMapper();
-    
-    // Load data into Mock Accumulo
-    Mapper<LongWritable,Text,Text,Mutation>.Context con = ContextFactory.createMapContext(mapper, context, rr, rw, oc, sr, split);
-    mapper.run(con);
-    
-    // Flush and close record writers.
-    rw.close(context);
-    
-    table = new QueryLogic();
-    table.setMetadataTableName(METADATA_TABLE_NAME);
-    table.setTableName(TABLE_NAME);
-    table.setIndexTableName(INDEX_TABLE_NAME);
-    table.setReverseIndexTableName(RINDEX_TABLE_NAME);
-    table.setUseReadAheadIterator(false);
-  }
-  
-  void debugQuery(String tableName) throws Exception {
-    Scanner s = c.createScanner(tableName, new Authorizations());
-    Range r = new Range();
-    s.setRange(r);
-    for (Entry<Key,Value> entry : s)
-      System.out.println(entry.getKey().toString() + " " + entry.getValue().toString());
-  }
-  
-  @Test
-  public void testTitle() {
-    Logger.getLogger(AbstractQueryLogic.class).setLevel(Level.OFF);
-    Logger.getLogger(RangeCalculator.class).setLevel(Level.OFF);
-    List<String> auths = new ArrayList<String>();
-    auths.add("enwiki");
-    Results results = table.runQuery(c, auths, "TITLE == 'afghanistanhistory'", null, null, null);
-    for (Document doc : results.getResults()) {
-      System.out.println("id: " + doc.getId());
-      for (Field field : doc.getFields())
-        System.out.println(field.getFieldName() + " -> " + field.getFieldValue());
-    }
-  }
-  
-}