Posted to commits@phoenix.apache.org by ja...@apache.org on 2014/02/05 04:20:48 UTC

git commit: Fix explain plan detection of point lookups, use RVC with IN when deleting columns, normalize single-element IN to equality, correctly detect null values in RVCs at parse time

Updated Branches:
  refs/heads/master 47707a7a7 -> ff1c1af6f


Fix explain plan detection of point lookups, use RVC with IN when deleting columns, normalize single-element IN to equality, correctly detect null values in RVCs at parse time


Project: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/commit/ff1c1af6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/tree/ff1c1af6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-phoenix/diff/ff1c1af6

Branch: refs/heads/master
Commit: ff1c1af6f5a5f6fb64c09e71fc89b5191d455e85
Parents: 47707a7
Author: James Taylor <ja...@apache.org>
Authored: Tue Feb 4 19:20:38 2014 -0800
Committer: James Taylor <ja...@apache.org>
Committed: Tue Feb 4 19:20:38 2014 -0800

----------------------------------------------------------------------
 .../apache/phoenix/compile/DeleteCompiler.java  | 10 ++--
 .../phoenix/compile/ExpressionCompiler.java     |  6 +-
 .../org/apache/phoenix/compile/ScanRanges.java  |  6 ++
 .../apache/phoenix/compile/WhereOptimizer.java  |  6 +-
 .../phoenix/expression/InListExpression.java    | 22 +++++---
 .../apache/phoenix/iterate/ExplainTable.java    | 33 +++++------
 .../apache/phoenix/parse/LiteralParseNode.java  |  4 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java | 37 ++++++++----
 .../apache/phoenix/schema/MetaDataClient.java   | 59 +++++++-------------
 .../org/apache/phoenix/schema/PDataType.java    | 18 +++---
 .../end2end/RowValueConstructorTest.java        |  9 +--
 .../phoenix/end2end/index/SaltedIndexTest.java  |  6 +-
 .../java/org/apache/phoenix/util/TestUtil.java  |  8 +--
 13 files changed, 115 insertions(+), 109 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index e22013b..8197d42 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -24,6 +24,7 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 
@@ -235,10 +236,11 @@ public class DeleteCompiler {
                 public MutationState execute() {
                     // We have a point lookup, so we know we have a simple set of fully qualified
                     // keys for our ranges
-                    List<KeyRange> keys = context.getScanRanges().getRanges().get(0);
-                    Map<ImmutableBytesPtr,Map<PColumn,byte[]>> mutation = Maps.newHashMapWithExpectedSize(keys.size());
-                    for (KeyRange key : keys) {
-                        mutation.put(new ImmutableBytesPtr(key.getLowerRange()), PRow.DELETE_MARKER);
+                    ScanRanges ranges = context.getScanRanges();
+                    Iterator<KeyRange> iterator = ranges.getPointLookupKeyIterator(); 
+                    Map<ImmutableBytesPtr,Map<PColumn,byte[]>> mutation = Maps.newHashMapWithExpectedSize(ranges.getPointLookupCount());
+                    while (iterator.hasNext()) {
+                        mutation.put(new ImmutableBytesPtr(iterator.next().getLowerRange()), PRow.DELETE_MARKER);
                     }
                     return new MutationState(tableRef, mutation, 0, maxSize, connection);
                 }

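For context, a hedged sketch of the new delete path, using only the APIs visible in this commit (getPointLookupKeyIterator() and getPointLookupCount() are added to ScanRanges below); the surrounding setup is illustrative, not the actual compiler code:

    // Assumes the WHERE clause compiled down to fully qualified point lookups.
    ScanRanges ranges = context.getScanRanges();
    // Safely empty when this is not a point lookup, unlike the old
    // ranges.getRanges().get(0), which blindly read the first slot's ranges.
    Iterator<KeyRange> keys = ranges.getPointLookupKeyIterator();
    Map<ImmutableBytesPtr, Map<PColumn, byte[]>> mutation =
            Maps.newHashMapWithExpectedSize(ranges.getPointLookupCount());
    while (keys.hasNext()) {
        // For a single-key range, the lower bound is the entire row key.
        mutation.put(new ImmutableBytesPtr(keys.next().getLowerRange()), PRow.DELETE_MARKER);
    }
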
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
index cd2b0e2..08944f1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
@@ -31,7 +31,6 @@ import java.util.List;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-
 import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -742,10 +741,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
             return LiteralExpression.newConstant(null, PDataType.BOOLEAN, firstChild.isDeterministic());
         }
         
-        Expression e = InListExpression.create(inChildren, ptr);
-        if (node.isNegate()) {
-            e = new NotExpression(e);
-        }
+        Expression e = InListExpression.create(inChildren, ptr, node.isNegate());
         if (node.isStateless()) {
             if (!e.evaluate(null, ptr) || ptr.getLength() == 0) {
                 return LiteralExpression.newConstant(null, e.getDataType(), e.isDeterministic());

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
index fec2259..88a1765 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ScanRanges.java
@@ -21,6 +21,7 @@ package org.apache.phoenix.compile;
 
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 
 import org.apache.hadoop.hbase.client.Scan;
@@ -34,6 +35,7 @@ import org.apache.phoenix.util.ScanUtil;
 import org.apache.phoenix.util.SchemaUtil;
 
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 
 
@@ -201,6 +203,10 @@ public class ScanRanges {
     public int getPointLookupCount() {
         return isPointLookup ? ranges.get(0).size() : 0;
     }
+    
+    public Iterator<KeyRange> getPointLookupKeyIterator() {
+        return isPointLookup ? ranges.get(0).iterator() : Iterators.<KeyRange>emptyIterator();
+    }
 
     public void setScanStartStopRow(Scan scan) {
         if (isEverything()) {

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
index 5072130..005e6ca 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereOptimizer.java
@@ -76,6 +76,7 @@ import com.google.common.collect.Lists;
  * @since 0.1
  */
 public class WhereOptimizer {
+    private static final List<KeyRange> EVERYTHING_RANGES = Collections.<KeyRange>singletonList(KeyRange.EVERYTHING_RANGE);
     private static final List<KeyRange> SALT_PLACEHOLDER = Collections.singletonList(PDataType.CHAR.getKeyRange(QueryConstants.SEPARATOR_BYTE_ARRAY));
     
     private WhereOptimizer() {
@@ -177,8 +178,8 @@ public class WhereOptimizer {
                     cnf.add(Collections.singletonList(KeyRange.EVERYTHING_RANGE));
                 }
             }
-            // We support (a,b) IN ((1,2),(3,4), so in this case we switch to a flattened schema
-            if (fullyQualifiedColumnCount > 1 && slot.getPKSpan() == fullyQualifiedColumnCount && slot.getKeyRanges().size() > 1) {
+            // We support (a,b) IN ((1,2),(3,4)), so in this case we switch to a flattened schema
+            if (fullyQualifiedColumnCount > 1 && slot.getPKSpan() == fullyQualifiedColumnCount && !EVERYTHING_RANGES.equals(slot.getKeyRanges())) {
                 schema = nBuckets == null ? SchemaUtil.VAR_BINARY_SCHEMA : SaltingUtil.VAR_BINARY_SALTED_SCHEMA;
             }
             KeyPart keyPart = slot.getKeyPart();
@@ -269,7 +270,6 @@ public class WhereOptimizer {
      * operators, CASE statements, and string concatenation.
      */
     public static class KeyExpressionVisitor extends TraverseNoExpressionVisitor<KeyExpressionVisitor.KeySlots> {
-        private static final List<KeyRange> EVERYTHING_RANGES = Collections.<KeyRange>singletonList(KeyRange.EVERYTHING_RANGE);
         private static final KeySlots DEGENERATE_KEY_PARTS = new KeySlots() {
             @Override
             public Iterator<KeySlot> iterator() {

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
index f6a397d..09323c2 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
@@ -29,19 +29,19 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.index.util.ImmutableBytesPtr;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableUtils;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import org.apache.hadoop.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.expression.visitor.ExpressionVisitor;
 import org.apache.phoenix.schema.ConstraintViolationException;
 import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.util.ByteUtil;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
 /*
  * Implementation of a SQL foo IN (a,b,c) expression. Other than the first
  * expression, child expressions must be constants.
@@ -57,7 +57,7 @@ public class InListExpression extends BaseSingleExpression {
     private ImmutableBytesPtr value = new ImmutableBytesPtr();
     private List<Expression> keyExpressions; // client side only
 
-    public static Expression create (List<Expression> children, ImmutableBytesWritable ptr) throws SQLException {
+    public static Expression create (List<Expression> children, ImmutableBytesWritable ptr, boolean isNegate) throws SQLException {
         Expression firstChild = children.get(0);
         PDataType firstChildType = firstChild.getDataType();
         
@@ -101,13 +101,19 @@ public class InListExpression extends BaseSingleExpression {
         // TODO: if inChildren.isEmpty() then Oracle throws a type mismatch exception. This means
         // that none of the list elements match in type and there's no null element. We'd return
         // false in this case. Should we throw?
-        if (keys.size() == 2) {
-            expression = new ComparisonExpression(CompareOp.EQUAL, keys);
+        // FIXME: We don't optimize RVC equality expression currently like we do with an IN
+        // expression, so don't convert in that case.
+        if (keys.size() == 2 && ! (firstChild instanceof RowValueConstructorExpression) ) {
+            expression = new ComparisonExpression(isNegate ? CompareOp.NOT_EQUAL : CompareOp.EQUAL, keys);
         } else {
             expression = new InListExpression(keys, coercedKeyExpressions);
+            if (isNegate) { 
+                expression = new NotExpression(expression);
+            }
         }
         return expression;
-    }    
+    }
+    
     public InListExpression() {
     }
 

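A hedged sketch of the reworked create() contract, with the signature from this hunk; the expression lists are shorthand for compiled children (first element is the LHS, the rest are the coerced keys):

    // k IN (1)      -> ComparisonExpression(EQUAL, k, 1)
    Expression eq = InListExpression.create(Arrays.asList(k, one), ptr, false);
    // k NOT IN (1)  -> ComparisonExpression(NOT_EQUAL, k, 1)
    Expression ne = InListExpression.create(Arrays.asList(k, one), ptr, true);
    // (k1,k2) IN ((1,2)) -> stays an InListExpression (wrapped in a
    // NotExpression when negated), since per the FIXME above an RVC
    // equality is not yet optimized the way an RVC IN is.
    Expression rvc = InListExpression.create(Arrays.asList(rvcLhs, rvcKey), ptr, false);
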
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
index 3eba7da..0fda9f8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/ExplainTable.java
@@ -64,30 +64,27 @@ public abstract class ExplainTable {
 
     private boolean explainSkipScan(StringBuilder buf) {
         ScanRanges scanRanges = context.getScanRanges();
-        if (scanRanges.useSkipScanFilter()) {
+        if (scanRanges.isPointLookup()) {
             int keyCount = scanRanges.getPointLookupCount();
-            if (keyCount == 0) {
-                buf.append("SKIP SCAN ");
-                int count = 1;
-                boolean hasRanges = false;
-                for (List<KeyRange> ranges : scanRanges.getRanges()) {
-                    count *= ranges.size();
-                    for (KeyRange range : ranges) {
-                        hasRanges |= !range.isSingleKey();
-                    }
+            buf.append("POINT LOOKUP ON " + keyCount + " KEY" + (keyCount > 1 ? "S " : " "));
+        } else if (scanRanges.useSkipScanFilter()) {
+            buf.append("SKIP SCAN ");
+            int count = 1;
+            boolean hasRanges = false;
+            for (List<KeyRange> ranges : scanRanges.getRanges()) {
+                count *= ranges.size();
+                for (KeyRange range : ranges) {
+                    hasRanges |= !range.isSingleKey();
                 }
-                buf.append("ON ");
-                buf.append(count);
-                buf.append(hasRanges ? " RANGE" : " KEY");
-                buf.append(count > 1 ? "S " : " ");
-            } else {
-                buf.append("POINT LOOKUP ON " + keyCount + " KEY" + (keyCount > 1 ? "S " : " "));
             }
-            return true;
+            buf.append("ON ");
+            buf.append(count);
+            buf.append(hasRanges ? " RANGE" : " KEY");
+            buf.append(count > 1 ? "S " : " ");
         } else {
             buf.append("RANGE SCAN ");
         }
-        return false;
+        return scanRanges.useSkipScanFilter();
     }
     
     protected void explain(String prefix, List<String> planSteps) {

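With this restructuring, a point lookup is reported as such even though it also goes through the skip-scan filter, and the method still returns whether a skip scan is used. A hedged sketch of the three wordings (only the quoted fragments come from this method; the queries are illustrative):

    // Fully qualified keys:
    //   WHERE pk = 'a'            -> "POINT LOOKUP ON 1 KEY "
    //   WHERE pk IN ('a','b','c') -> "POINT LOOKUP ON 3 KEYS "
    // Skip scan, where the count is the cross product of per-slot ranges:
    //   all slots single keys     -> "SKIP SCAN ON 4 KEYS "
    //   some slot is a range      -> "SKIP SCAN ON 4 RANGES "
    // Anything else:
    //   WHERE pk > 'a'            -> "RANGE SCAN "
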
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
index 25247ee..5e3d71e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
@@ -44,8 +44,10 @@ public class LiteralParseNode extends TerminalParseNode {
     private final PDataType type;
     
     public LiteralParseNode(Object value) {
-        this.value = value;
         this.type = PDataType.fromLiteral(value);
+        // This will make the value null if the value passed through represents null for the given type.
+        // For example, an empty string is treated as a null.
+        this.value = this.type == null ? null : this.type.toObject(value, this.type);
     }
 
     public PDataType getType() {

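A hedged sketch of the effect, assuming PDataType.fromLiteral resolves a Java String to a character type whose toObject() (amended in PDataType.java below) maps the empty string to null:

    LiteralParseNode empty = new LiteralParseNode("");
    // "" represents SQL NULL for character types, so the node's value is null
    assert empty.getValue() == null;
    LiteralParseNode name = new LiteralParseNode("a");
    assert "a".equals(name.getValue());

This is what lets the RVC rewrite in ParseNodeRewriter below treat a '' literal the same as a missing node.
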
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
index 504b496..7eab160 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
@@ -25,13 +25,13 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.phoenix.compile.ColumnResolver;
 import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 /**
  * 
  * Base class for visitors that rewrite the expression node hierarchy
@@ -339,12 +339,21 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
     
     @Override
     public ParseNode visitLeave(final InListParseNode node, List<ParseNode> nodes) throws SQLException {
-        return leaveCompoundNode(node, nodes, new CompoundNodeFactory() {
+        ParseNode normNode = leaveCompoundNode(node, nodes, new CompoundNodeFactory() {
             @Override
             public ParseNode createNode(List<ParseNode> children) {
                 return NODE_FACTORY.inList(children, node.isNegate());
             }
         });
+        if (normNode.getChildren().size() == 2) { // Rewrite as equality or inequality
+            // Rewrite row value constructor in = or != expression, as this is the same as if it was
+            // used in an equality expression which will be more efficient
+            ParseNode lhs = normNode.getChildren().get(0);
+            ParseNode rhs = normNode.getChildren().get(1);
+            normNode = NODE_FACTORY.comparison(node.isNegate() ? CompareOp.NOT_EQUAL : CompareOp.EQUAL, lhs, rhs);
+            normNode = normNode.accept(this);
+        }
+        return normNode;
     }
     
     @Override
@@ -387,14 +396,18 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
             for (int i = 1; i < rhs.getChildren().size(); i++) {
                 rewriteRowValueConstuctorEqualityComparison(null, rhs.getChildren().get(i), andNodes);
             }
-        } else if (lhs == null && rhs == null) { // null == null will end up making the query degenerate
-            andNodes.add(NODE_FACTORY.comparison(CompareOp.EQUAL, null, null).accept(this));
-        } else if (lhs == null) { // AND rhs IS NULL
-            andNodes.add(NODE_FACTORY.isNull(rhs, false).accept(this));
-        } else if (rhs == null) { // AND lhs IS NULL
-            andNodes.add(NODE_FACTORY.isNull(lhs, false).accept(this));
-        } else { // AND lhs = rhs
-            andNodes.add(NODE_FACTORY.comparison(CompareOp.EQUAL, lhs, rhs).accept(this));
+        } else {
+            boolean isLHSNull = lhs == null || (lhs instanceof LiteralParseNode && ((LiteralParseNode)lhs).getValue() == null);
+            boolean isRHSNull = rhs == null || (rhs instanceof LiteralParseNode && ((LiteralParseNode)rhs).getValue() == null);
+            if (isLHSNull && isRHSNull) { // null == null will end up making the query degenerate
+                andNodes.add(NODE_FACTORY.comparison(CompareOp.EQUAL, lhs, rhs).accept(this));
+            } else if (isLHSNull) { // AND rhs IS NULL
+                andNodes.add(NODE_FACTORY.isNull(rhs, false).accept(this));
+            } else if (isRHSNull) { // AND lhs IS NULL
+                andNodes.add(NODE_FACTORY.isNull(lhs, false).accept(this));
+            } else { // AND lhs = rhs
+                andNodes.add(NODE_FACTORY.comparison(CompareOp.EQUAL, lhs, rhs).accept(this));
+            }
         }
     }
     

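Taken together, the two hunks above normalize single-element INs and null detection at parse time. A hedged, SQL-level summary (identifiers are illustrative):

    // Single-element IN becomes a comparison, then takes the RVC
    // equality rewrite:
    //   (a, b) IN ((1, 2))  ->  (a, b) = (1, 2)  ->  a = 1 AND b = 2
    //   k NOT IN (5)        ->  k != 5
    // Null literals are now detected, not just missing nodes:
    //   (a, b) = (1, null)  ->  a = 1 AND b IS NULL
    //   (a, b) IN (('x'))   ->  a = 'x' AND b IS NULL
    //     (the shorter RVC leaves b null; see the RowValueConstructorTest
    //      change below)
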
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 86fd4ab..478731e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -1408,51 +1408,32 @@ public class MetaDataClient {
 
 
     private String dropColumnMutations(PTable table, List<PColumn> columnsToDrop, List<Mutation> tableMetaData) throws SQLException {
-        String tenantId = connection.getTenantId() == null ? null : connection.getTenantId().getString();
+        String tenantId = connection.getTenantId() == null ? "" : connection.getTenantId().getString();
         String schemaName = table.getSchemaName().getString();
         String tableName = table.getTableName().getString();
         String familyName = null;
+        /*
+         * Generate a fully qualified RVC with an IN clause, since that's what our optimizer can
+         * handle currently. If/when the optimizer handles (A and ((B AND C) OR (D AND E))) we
+         * can factor out the tenant ID, schema name, and table name columns
+         */
         StringBuilder buf = new StringBuilder("DELETE FROM " + TYPE_SCHEMA + ".\"" + TYPE_TABLE + "\" WHERE ");
-        buf.append(TENANT_ID);
-        if (tenantId == null || tenantId.length() == 0) {
-            buf.append(" IS NULL AND ");
-        } else {
-            buf.append(" = ? AND ");
-        }
-        buf.append(TABLE_SCHEM_NAME);
-        if (schemaName == null || schemaName.length() == 0) {
-            buf.append(" IS NULL AND ");
-        } else {
-            buf.append(" = ? AND ");
+        buf.append("(" + 
+                TENANT_ID + "," + TABLE_SCHEM_NAME + "," + TABLE_NAME_NAME + "," + 
+                COLUMN_NAME + ", " + TABLE_CAT_NAME + ") IN (");
+        for(PColumn columnToDrop : columnsToDrop) {
+            buf.append("('" + tenantId + "'");
+            buf.append(",'" + schemaName + "'");
+            buf.append(",'" + tableName + "'");
+            buf.append(",'" + columnToDrop.getName().getString() + "'");
+            buf.append(",'" + (columnToDrop.getFamilyName() == null ? "" : columnToDrop.getFamilyName().getString()) + "'),");
         }
-        buf.append (TABLE_NAME_NAME + " = ? AND " + COLUMN_NAME + " = ? AND " + TABLE_CAT_NAME);
-        buf.append(" = ?");
+        buf.setCharAt(buf.length()-1, ')');
         
-        // TODO: when DeleteCompiler supports running an fully qualified IN query on the client-side,
-        // we can use a single IN query here instead of executing a different query per column being dropped.
-        PreparedStatement colDelete = connection.prepareStatement(buf.toString());
-        try {
-            for(PColumn columnToDrop : columnsToDrop) {
-                int i = 1;
-                if (tenantId != null && tenantId.length() > 0) {
-                    colDelete.setString(i++, tenantId);
-                }
-                if (schemaName != null & schemaName.length() > 0) {
-                    colDelete.setString(i++, schemaName);    
-                }
-                colDelete.setString(i++, tableName);
-                colDelete.setString(i++, columnToDrop.getName().getString());
-                colDelete.setString(i++, columnToDrop.getFamilyName() == null ? null : columnToDrop.getFamilyName().getString());
-                colDelete.execute();
-            }
-        } finally {
-            if(colDelete != null) {
-                colDelete.close();
-            }
-        }
+        connection.createStatement().execute(buf.toString());
         
-       Collections.sort(columnsToDrop,new Comparator<PColumn> () {
-           @Override
+        Collections.sort(columnsToDrop,new Comparator<PColumn> () {
+            @Override
             public int compare(PColumn left, PColumn right) {
                return Ints.compare(left.getPosition(), right.getPosition());
             }
@@ -1460,7 +1441,7 @@ public class MetaDataClient {
     
         int columnsToDropIndex = 0;
         PreparedStatement colUpdate = connection.prepareStatement(UPDATE_COLUMN_POSITION);
-        colUpdate.setString(1, connection.getTenantId() == null ? null : connection.getTenantId().getString());
+        colUpdate.setString(1, tenantId);
         colUpdate.setString(2, schemaName);
         colUpdate.setString(3, tableName);
         for (int i = columnsToDrop.get(columnsToDropIndex).getPosition() + 1; i < table.getColumns().size(); i++) {

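For two dropped columns, a hedged sketch of the single statement this now issues (catalog constants elided to their column names; empty strings stand in for a null tenant ID and column family, as in the hunk):

    // DELETE FROM <TYPE_SCHEMA>."<TYPE_TABLE>" WHERE
    //   (TENANT_ID, TABLE_SCHEM, TABLE_NAME, COLUMN_NAME, TABLE_CAT)
    //   IN (('', 'S', 'T', 'C1', ''),
    //       ('', 'S', 'T', 'C2', 'CF'))
    connection.createStatement().execute(buf.toString());

One round trip replaces the per-column PreparedStatement loop, and because every PK column is fully qualified, the RVC IN is exactly the shape the DeleteCompiler change above can execute as a client-side point-lookup delete.
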
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
index 8d14fb0..f38a98d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
@@ -33,12 +33,6 @@ import java.util.Map;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Base64;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import com.google.common.collect.ImmutableMap;
-import com.google.common.math.LongMath;
-import com.google.common.primitives.Booleans;
-import com.google.common.primitives.Doubles;
-import com.google.common.primitives.Longs;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.util.ByteUtil;
@@ -46,6 +40,12 @@ import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.NumberUtil;
 import org.apache.phoenix.util.StringUtil;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.math.LongMath;
+import com.google.common.primitives.Booleans;
+import com.google.common.primitives.Doubles;
+import com.google.common.primitives.Longs;
+
 
 /**
  * The data types of PColumns
@@ -93,7 +93,8 @@ public enum PDataType {
             switch (actualType) {
             case VARCHAR:
             case CHAR:
-                return object;
+                String s = (String)object;
+                return s == null || s.length() > 0 ? s : null;
             default:
                 return super.toObject(object, actualType);
             }
@@ -217,7 +218,8 @@ public enum PDataType {
             switch (actualType) {
             case VARCHAR:
             case CHAR:
-                return object;
+                String s = (String)object;
+                return s == null || s.length() > 0 ? s : null;
             default:
                 return super.toObject(object, actualType);
             }

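A hedged sketch of the amended coercion, assuming the two patched constants are the character types (as their case labels suggest):

    Object a = PDataType.VARCHAR.toObject("x", PDataType.VARCHAR); // "x"
    Object b = PDataType.VARCHAR.toObject("",  PDataType.VARCHAR); // null
    Object c = PDataType.CHAR.toObject("",  PDataType.CHAR);       // null

Phoenix does not distinguish an empty string from NULL in storage, so mapping "" to null here keeps LiteralParseNode (above) consistent with what would be read back from a row.
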
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/test/java/org/apache/phoenix/end2end/RowValueConstructorTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/end2end/RowValueConstructorTest.java b/phoenix-core/src/test/java/org/apache/phoenix/end2end/RowValueConstructorTest.java
index 12a948b..02bb201 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/end2end/RowValueConstructorTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/end2end/RowValueConstructorTest.java
@@ -53,11 +53,10 @@ import java.sql.SQLException;
 import java.sql.Timestamp;
 import java.util.Properties;
 
-import org.junit.Test;
-
 import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.TestUtil;
+import org.junit.Test;
 
 public class RowValueConstructorTest extends BaseClientManagedTimeTest {
     
@@ -784,7 +783,8 @@ public class RowValueConstructorTest extends BaseClientManagedTimeTest {
         long ts = nextTimestamp();
         String tenantId = getOrganizationId();
         initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
-        //we have a row present in aTable where organization_id = tenantId and  x_integer = 5 
+        // Though we have a row present in aTable where organization_id = tenantId and  x_integer = 5,
+        // we'd also need to have an entity_id that is null (which we don't have).
         String query = "SELECT organization_id, entity_id FROM aTable WHERE (organization_id, entity_id) IN (('" + tenantId + "')) AND x_integer = 5";
         Properties props = new Properties(TEST_PROPERTIES);
         props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2)); // Execute at timestamp 2
@@ -795,9 +795,6 @@ public class RowValueConstructorTest extends BaseClientManagedTimeTest {
                 conn = DriverManager.getConnection(PHOENIX_JDBC_URL, props);
                 statement = conn.prepareStatement(query);
                 ResultSet rs = statement.executeQuery();
-                assertTrue(rs.next());
-                assertEquals(tenantId, rs.getString(1));
-                assertEquals(ROW7, rs.getString(2));
                 assertFalse(rs.next());
             } finally {
                 if(statement != null) {

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/test/java/org/apache/phoenix/end2end/index/SaltedIndexTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/end2end/index/SaltedIndexTest.java b/phoenix-core/src/test/java/org/apache/phoenix/end2end/index/SaltedIndexTest.java
index 115fe30..6936f15 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/end2end/index/SaltedIndexTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/end2end/index/SaltedIndexTest.java
@@ -47,7 +47,11 @@ public class SaltedIndexTest extends BaseIndexTest{
     
     @BeforeClass 
     public static void doSetup() throws Exception {
-        Map<String,String> props = Maps.newHashMapWithExpectedSize(1);
+        Map<String,String> props = Maps.newHashMapWithExpectedSize(3);
+        // Don't split intra region so we can more easily know that the n-way parallelization is for the explain plan
+        props.put(QueryServices.MAX_INTRA_REGION_PARALLELIZATION_ATTRIB, Integer.toString(1));
+        // Forces server cache to be used
+        props.put(QueryServices.INDEX_MUTATE_BATCH_SIZE_THRESHOLD_ATTRIB, Integer.toString(2));
         // Drop the HBase table metadata for this test
         props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
         // Must update config before starting server

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/ff1c1af6/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
index 0674b56..00a2a09 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/util/TestUtil.java
@@ -43,8 +43,6 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import com.google.common.collect.Lists;
 import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.expression.AndExpression;
@@ -70,6 +68,8 @@ import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.schema.RowKeyValueAccessor;
 import org.apache.phoenix.schema.tuple.Tuple;
 
+import com.google.common.collect.Lists;
+
 
 
 public class TestUtil {
@@ -244,7 +244,7 @@ public class TestUtil {
     }
 
     public static Expression in(Expression... expressions) throws SQLException {
-        return InListExpression.create(Arrays.asList(expressions), new ImmutableBytesWritable());
+        return InListExpression.create(Arrays.asList(expressions), new ImmutableBytesWritable(), false);
     }
 
     public static Expression in(Expression e, Object... literals) throws SQLException {
@@ -254,7 +254,7 @@ public class TestUtil {
         for (Object o : literals) {
             expressions.add(LiteralExpression.newConstant(o, childType));
         }
-        return InListExpression.create(expressions, new ImmutableBytesWritable());
+        return InListExpression.create(expressions, new ImmutableBytesWritable(), false);
     }
 
     public static void assertDegenerate(StatementContext context) {