Posted to commits@phoenix.apache.org by ja...@apache.org on 2014/10/18 03:18:22 UTC

[3/4] git commit: PHOENIX-1297 Adding utility methods to get primary key information from the optimized query plan (Samarth Jain)

PHOENIX-1297 Adding utility methods to get primary key information from the optimized query plan (Samarth Jain)
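
For reference, a minimal sketch (not part of the commit itself) of how the new PhoenixRuntime.encodeValues/decodeValues utilities can be used. The method signatures mirror their usage in the QueryMoreIT changes below; the connection URL, table name, and sample values are illustrative assumptions and presume a table with a matching primary key already exists.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.util.PhoenixRuntime;

import com.google.common.collect.Lists;

public class PkEncodeDecodeSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative connection URL; replace with your cluster's ZooKeeper quorum.
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        // (column family, column name) pairs; the family is null for primary key columns.
        List<Pair<String, String>> columns = Lists.newArrayList(
                new Pair<String, String>(null, "TENANT_ID"),
                new Pair<String, String>(null, "PARENT_ID"),
                new Pair<String, String>(null, "CREATED_DATE"),
                new Pair<String, String>(null, "ENTITY_HISTORY_ID"));
        Object[] values = new Object[] { "00Dxxxxxtenant1", "parentId0",
                new java.sql.Date(System.currentTimeMillis()), "historyId0" };
        // Encode the PK values into a single byte[] using the table's metadata ...
        byte[] encoded = PhoenixRuntime.encodeValues(conn, "BASE_HISTORY_TABLE", values, columns);
        // ... and decode them back, e.g. to bind into a row value constructor (RVC) query.
        Object[] decoded = PhoenixRuntime.decodeValues(conn, "BASE_HISTORY_TABLE", encoded, columns);
        System.out.println(Arrays.asList(decoded));
        conn.close();
    }
}
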


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/17eb70d8
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/17eb70d8
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/17eb70d8

Branch: refs/heads/4.0
Commit: 17eb70d84940534f6d67c3599929f15f65f437a6
Parents: cc38299
Author: James Taylor <jt...@salesforce.com>
Authored: Fri Oct 17 16:56:29 2014 -0700
Committer: James Taylor <jt...@salesforce.com>
Committed: Fri Oct 17 16:59:31 2014 -0700

----------------------------------------------------------------------
 .../org/apache/phoenix/end2end/ArrayIT.java     |   8 +-
 .../phoenix/end2end/PhoenixEncodeDecodeIT.java  | 215 -----------
 .../org/apache/phoenix/end2end/QueryMoreIT.java | 141 +++++---
 .../org/apache/phoenix/end2end/StddevIT.java    |  40 +--
 .../apache/phoenix/trace/BaseTracingTestIT.java |   3 +
 .../apache/phoenix/schema/KeyValueSchema.java   |   5 +-
 .../org/apache/phoenix/schema/PDataType.java    | 108 +++---
 .../java/org/apache/phoenix/util/IndexUtil.java |  23 ++
 .../org/apache/phoenix/util/PhoenixRuntime.java | 352 +++++++++++++++----
 .../org/apache/phoenix/util/SchemaUtil.java     |  28 ++
 .../phoenix/compile/QueryOptimizerTest.java     | 264 ++++++++++++++
 .../apache/phoenix/schema/ValueBitSetTest.java  |  43 +++
 .../phoenix/util/PhoenixEncodeDecodeTest.java   |  72 ++++
 .../apache/phoenix/util/PhoenixRuntimeTest.java |  77 +++-
 .../phoenix/pig/PhoenixHBaseLoaderIT.java       |   7 +-
 15 files changed, 943 insertions(+), 443 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayIT.java
index 3fb276c..803f150 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ArrayIT.java
@@ -44,7 +44,6 @@ import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.StringUtil;
-import org.junit.Assert;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -1299,13 +1298,8 @@ public class ArrayIT extends BaseClientManagedTimeIT {
 			PreparedStatement statement = conn.prepareStatement(query);
 			ResultSet rs = statement.executeQuery();
 			assertTrue(rs.next());
-			Double[] doubleArr = new Double[1];
-			doubleArr[0] = 36.763;
-			Array array = conn.createArrayOf("DOUBLE", doubleArr);
 			PhoenixArray resultArray = (PhoenixArray) rs.getArray(1);
-			assertEquals(resultArray, array);
-			Assert.fail("Should have failed");
-		} catch (Exception e) {
+			assertNull(resultArray);
 		} finally {
 			conn.close();
 		}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixEncodeDecodeIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixEncodeDecodeIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixEncodeDecodeIT.java
deleted file mode 100644
index bdb0745..0000000
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PhoenixEncodeDecodeIT.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you maynot use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.end2end;
-
-import static org.junit.Assert.assertEquals;
-
-import java.sql.Connection;
-import java.sql.Date;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.util.Arrays;
-import java.util.Properties;
-
-import org.apache.phoenix.util.PhoenixRuntime;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-@Category(HBaseManagedTimeTest.class)
-public class PhoenixEncodeDecodeIT extends BaseHBaseManagedTimeIT {
-    
-    private static String tenantId = "ABC";
-    
-    @Test
-    public void testEncodeDecode() throws Exception {
-        Connection conn = DriverManager.getConnection(getUrl());
-        conn.createStatement().execute(
-                "CREATE TABLE t(org_id CHAR(3) not null, p_id CHAR(3) not null, date DATE not null, e_id CHAR(3) not null, old_value VARCHAR, new_value VARCHAR " +
-                "CONSTRAINT pk PRIMARY KEY (org_id, p_id, date, e_id))");
-        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t VALUES (?, ?, ?, ?, ?)");
-        Date date = new Date(System.currentTimeMillis());
-        stmt.setString(1,  "abc");
-        stmt.setString(2,  "def");
-        stmt.setDate(3,  date);
-        stmt.setString(4,  "eid");
-        stmt.setString(5, "old");
-        stmt.executeUpdate();
-        conn.commit();
-
-        stmt = conn.prepareStatement("SELECT org_id, p_id, date, e_id FROM T");
-
-        Object[] retrievedValues = new Object[4];
-        ResultSet rs = stmt.executeQuery();
-        rs.next();
-        retrievedValues[0] = rs.getString(1);
-        retrievedValues[1] = rs.getString(2);
-        retrievedValues[2] = rs.getDate(3);
-        retrievedValues[3] = rs.getString(4);
-
-        byte[] value = PhoenixRuntime.encodePK(conn, "T", retrievedValues);
-        Object[] decodedValues = PhoenixRuntime.decodePK(conn, "T", value);
-
-        assertEquals(Arrays.asList(decodedValues), Arrays.asList(retrievedValues));
-    }
-
-    @Test
-    public void testEncodeDecodeSalted() throws Exception {
-        Connection conn = DriverManager.getConnection(getUrl());
-        conn.createStatement().execute(
-                "CREATE TABLE t(org_id CHAR(3) not null, p_id CHAR(3) not null, date DATE not null, e_id CHAR(3) not null, old_value VARCHAR, new_value VARCHAR " +
-                "CONSTRAINT pk PRIMARY KEY (org_id, p_id, date, e_id)) SALT_BUCKETS = 2");
-        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO t VALUES (?, ?, ?, ?, ?)");
-        Date date = new Date(System.currentTimeMillis());
-        stmt.setString(1,  "abc");
-        stmt.setString(2,  "def");
-        stmt.setDate(3,  date);
-        stmt.setString(4,  "eid");
-        stmt.setString(5, "old");
-        stmt.executeUpdate();
-        conn.commit();
-
-        stmt = conn.prepareStatement("SELECT org_id, p_id, date, e_id FROM T");
-
-        Object[] retrievedValues = new Object[4];
-        ResultSet rs = stmt.executeQuery();
-        rs.next();
-        retrievedValues[0] = rs.getString(1);
-        retrievedValues[1] = rs.getString(2);
-        retrievedValues[2] = rs.getDate(3);
-        retrievedValues[3] = rs.getString(4);
-
-        byte[] value = PhoenixRuntime.encodePK(conn, "T", retrievedValues);
-        Object[] decodedValues = PhoenixRuntime.decodePK(conn, "T", value);
-
-        assertEquals(Arrays.asList(decodedValues), Arrays.asList(retrievedValues));
-    }
-
-    @Test
-    public void testEncodeDecodeMultiTenant() throws Exception {
-        Connection globalConn = DriverManager.getConnection(getUrl());
-        try {
-            globalConn.createStatement().execute(
-                    "CREATE TABLE T(tenant_id CHAR(3) not null, p_id CHAR(3) not null, date DATE not null, e_id CHAR(3) not null, old_value VARCHAR, new_value VARCHAR " +
-                    "CONSTRAINT pk PRIMARY KEY (tenant_id, p_id, date, e_id)) MULTI_TENANT = true");
-        } finally {
-            globalConn.close();
-        }
-
-        Connection tenantConn = getTenantSpecificConnection();
-
-        //create tenant-specific view. 
-        tenantConn.createStatement().execute("CREATE VIEW TENANT_TABLE AS SELECT * FROM T");
-        
-        PreparedStatement stmt = tenantConn.prepareStatement("UPSERT INTO TENANT_TABLE (p_id, date, e_id) VALUES (?, ?, ?)");
-        Date date = new Date(System.currentTimeMillis());
-        stmt.setString(1,  "def");
-        stmt.setDate(2,  date);
-        stmt.setString(3,  "eid");
-        stmt.executeUpdate();
-        tenantConn.commit();
-
-        stmt = tenantConn.prepareStatement("SELECT p_id, date, e_id FROM TENANT_TABLE");
-
-        Object[] retrievedValues = new Object[3];
-        ResultSet rs = stmt.executeQuery();
-        rs.next();
-        retrievedValues[0] = rs.getString(1);
-        retrievedValues[1] = rs.getDate(2);
-        retrievedValues[2] = rs.getString(3);
-
-        byte[] value = PhoenixRuntime.encodePK(tenantConn, "TENANT_TABLE", retrievedValues);
-        Object[] decodedValues = PhoenixRuntime.decodePK(tenantConn, "TENANT_TABLE", value);
-
-        assertEquals(Arrays.asList(decodedValues), Arrays.asList(retrievedValues));
-    }
-
-    @Test
-    public void testEncodeDecodeSaltedMultiTenant() throws Exception {
-        Connection globalConn = DriverManager.getConnection(getUrl());
-        try {
-            globalConn.createStatement().execute(
-                    "CREATE TABLE T(tenant_id CHAR(3) not null, p_id CHAR(3) not null, date DATE not null, e_id CHAR(3) not null, old_value VARCHAR, new_value VARCHAR " +
-                    "CONSTRAINT pk PRIMARY KEY (tenant_id, p_id, date, e_id)) MULTI_TENANT = true, SALT_BUCKETS = 2");
-        } finally {
-            globalConn.close();
-        }
-
-        Connection tenantConn = getTenantSpecificConnection();
-
-        //create tenant-specific view. 
-        tenantConn.createStatement().execute("CREATE VIEW TENANT_TABLE AS SELECT * FROM T");
-        
-        PreparedStatement stmt = tenantConn.prepareStatement("UPSERT INTO TENANT_TABLE (p_id, date, e_id) VALUES (?, ?, ?)");
-        Date date = new Date(System.currentTimeMillis());
-        stmt.setString(1,  "def");
-        stmt.setDate(2,  date);
-        stmt.setString(3,  "eid");
-        stmt.executeUpdate();
-        tenantConn.commit();
-
-        stmt = tenantConn.prepareStatement("SELECT p_id, date, e_id FROM TENANT_TABLE");
-
-        Object[] retrievedValues = new Object[3];
-        ResultSet rs = stmt.executeQuery();
-        rs.next();
-        retrievedValues[0] = rs.getString(1);
-        retrievedValues[1] = rs.getDate(2);
-        retrievedValues[2] = rs.getString(3);
-
-        byte[] value = PhoenixRuntime.encodePK(tenantConn, "TENANT_TABLE", retrievedValues);
-        Object[] decodedValues = PhoenixRuntime.decodePK(tenantConn, "TENANT_TABLE", value);
-
-        assertEquals(Arrays.asList(decodedValues), Arrays.asList(retrievedValues));
-    }
-    
-    @Test
-    public void testEncodeDecodePaddingPks() throws Exception {
-        Connection conn = DriverManager.getConnection(getUrl());
-        conn.createStatement().execute(
-                "CREATE TABLE T(pk1 CHAR(15) not null, pk2 CHAR(15) not null, v1 DATE " +
-                "CONSTRAINT pk PRIMARY KEY (pk1, pk2))");
-        
-        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO T (pk1, pk2, v1) VALUES (?, ?, ?)");
-        stmt.setString(1,  "def");
-        stmt.setString(2,  "eid");
-        stmt.setDate(3, new Date(100));
-        stmt.executeUpdate();
-        conn.commit();
-
-        stmt = conn.prepareStatement("SELECT pk1, pk2 FROM T");
-
-        Object[] retrievedValues = new Object[2];
-        ResultSet rs = stmt.executeQuery();
-        rs.next();
-        retrievedValues[0] = rs.getString(1);
-        retrievedValues[1] = rs.getString(2);
-        
-        byte[] value = PhoenixRuntime.encodePK(conn, "T", retrievedValues);
-        Object[] decodedValues = PhoenixRuntime.decodePK(conn, "T", value);
-
-        assertEquals(Arrays.asList(decodedValues), Arrays.asList(retrievedValues));
-    }
-
-    private static Connection getTenantSpecificConnection() throws Exception {
-        Properties props = new Properties();
-        props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
-        return DriverManager.getConnection(getUrl(), props);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
index 5173b0e..e82abbb 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryMoreIT.java
@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.util.Base64;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -40,40 +41,41 @@ import com.google.common.collect.Lists;
 @Category(HBaseManagedTimeTest.class)
 public class QueryMoreIT extends BaseHBaseManagedTimeIT {
     
-    //Data table - multi-tenant = true, salted = true 
+    private String dataTableName;
+    //queryAgainstTenantSpecificView = true, dataTableSalted = true 
     @Test
     public void testQueryMore1() throws Exception {
         testQueryMore(true, true);
     }
     
-    //Data table - multi-tenant = false, salted = true 
+    //queryAgainstTenantSpecificView = false, dataTableSalted = true 
     @Test
     public void testQueryMore2() throws Exception {
         testQueryMore(false, true);
     }
     
-    //Data table - multi-tenant = false, salted = false
+    //queryAgainstTenantSpecificView = false, dataTableSalted = false
     @Test
     public void testQueryMore3() throws Exception {
         testQueryMore(false, false);
     }
     
-    //Data table - multi-tenant = true, salted = false 
+    //queryAgainstTenantSpecificView = true, dataTableSalted = false 
     @Test
     public void testQueryMore4() throws Exception {
         testQueryMore(true, false);
     }
     
-    private void testQueryMore(boolean dataTableMultiTenant, boolean dataTableSalted) throws Exception {
+    private void testQueryMore(boolean queryAgainstTenantSpecificView, boolean dataTableSalted) throws Exception {
         String[] tenantIds = new String[] {"00Dxxxxxtenant1", "00Dxxxxxtenant2", "00Dxxxxxtenant3"};
         int numRowsPerTenant = 10;
         String cursorTableName = "CURSOR_TABLE";
-        String dataTableName = "BASE_HISTORY_TABLE" + (dataTableMultiTenant ? "_MULTI" : "") + (dataTableSalted ? "_SALTED" : "");
+        this.dataTableName = "BASE_HISTORY_TABLE" + (dataTableSalted ? "_SALTED" : "");
         String cursorTableDDL = "CREATE TABLE IF NOT EXISTS " + 
                 cursorTableName +  " (\n" +  
                 "TENANT_ID VARCHAR(15) NOT NULL\n," +  
                 "QUERY_ID VARCHAR(15) NOT NULL,\n" +
-                "CURSOR_ORDER BIGINT NOT NULL\n" + 
+                "CURSOR_ORDER BIGINT NOT NULL \n" + 
                 "CONSTRAINT CURSOR_TABLE_PK PRIMARY KEY (TENANT_ID, QUERY_ID, CURSOR_ORDER)) "+
                 "SALT_BUCKETS = 4, TTL=86400";
         String baseDataTableDDL = "CREATE TABLE IF NOT EXISTS " +
@@ -86,7 +88,7 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
                 "OLDVAL_STRING VARCHAR,\n" + 
                 "NEWVAL_STRING VARCHAR\n" + 
                 "CONSTRAINT PK PRIMARY KEY(TENANT_ID, PARENT_ID, CREATED_DATE DESC, ENTITY_HISTORY_ID)) " + 
-                "VERSIONS = 1, MULTI_TENANT = true, SALT_BUCKETS = 4";
+                "VERSIONS = 1, MULTI_TENANT = true" + (dataTableSalted ? ", SALT_BUCKETS = 4" : "");
         
         //create cursor and data tables.
         Connection conn = DriverManager.getConnection(getUrl());
@@ -94,14 +96,36 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         conn.createStatement().execute(baseDataTableDDL);
         conn.close();
         
-        //upsert rows in the data table.
+        //upsert rows in the data table for all the tenantIds
         Map<String, List<String>> historyIdsPerTenant = createHistoryTableRows(dataTableName, tenantIds, numRowsPerTenant);
         
+        // assert query more for tenantId -> tenantIds[0]
         String tenantId = tenantIds[0];
         String cursorQueryId = "00TcursrqueryId";
-        String tenantViewName = dataTableMultiTenant ? ("HISTORY_TABLE" + "_" + tenantId) : null;
-        assertEquals(numRowsPerTenant, upsertSelectRecordsInCursorTableForTenant(dataTableName, dataTableMultiTenant, tenantId, tenantViewName, cursorQueryId));
+        String tableOrViewName = queryAgainstTenantSpecificView ? ("\"HISTORY_TABLE" + "_" + tenantId + "\"") : dataTableName;
         
+        assertEquals(numRowsPerTenant, upsertSelectRecordsInCursorTableForTenant(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId));
+        
+        /*// assert that the data inserted in cursor table matches the data in the data table for tenantId.
+        String selectDataTable = "SELECT TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID FROM BASE_HISTORY_TABLE WHERE TENANT_ID = ? ";
+        String selectCursorTable = "SELECT TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID FROM  CURSOR_TABLE (PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15)) WHERE TENANT_ID = ? ";
+        
+        PreparedStatement stmtData = DriverManager.getConnection(getUrl()).prepareStatement(selectDataTable);
+        stmtData.setString(1, tenantId);
+        ResultSet rsData = stmtData.executeQuery();
+        
+        PreparedStatement stmtCursor = DriverManager.getConnection(getUrl()).prepareStatement(selectCursorTable);
+        stmtCursor.setString(1, tenantId);
+        ResultSet rsCursor = stmtCursor.executeQuery();
+        
+        while(rsData.next() && rsCursor.next()) {
+            assertEquals(rsData.getString("TENANT_ID"), rsCursor.getString("TENANT_ID"));
+            assertEquals(rsData.getString("PARENT_ID"), rsCursor.getString("PARENT_ID"));
+            assertEquals(rsData.getDate("CREATED_DATE"), rsCursor.getDate("CREATED_DATE"));
+            assertEquals(rsData.getString("ENTITY_HISTORY_ID"), rsCursor.getString("ENTITY_HISTORY_ID"));
+        }
+        
+        */
         Connection conn2 = DriverManager.getConnection(getUrl());
         ResultSet rs = conn2.createStatement().executeQuery("SELECT count(*) from " + cursorTableName);
         rs.next();
@@ -110,20 +134,28 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         
         int startOrder = 0;
         int endOrder = 5;
-        int numRecordsThatShouldBeRetrieved = 5;
+        int numRecordsThatShouldBeRetrieved = numRowsPerTenant/2; // we will test for two rounds of query more.
         
-        //get first batch of cursor ids out of the cursor table.
-        String[] cursorIds = getRecordsOutofCursorTable(dataTableName, tenantId, cursorQueryId, startOrder, endOrder, numRecordsThatShouldBeRetrieved);
+        // get first batch of cursor ids out of the cursor table.
+        String[] cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder, endOrder);
         assertEquals(numRecordsThatShouldBeRetrieved, cursorIds.length);
-        
-        //now query against the tenant view and fetch first batch of records.
-        List<String> historyIds = doQueryMore(dataTableName, dataTableMultiTenant, tenantId, tenantViewName, cursorIds);
+        // now query and fetch first batch of records.
+        List<String> historyIds = doQueryMore(queryAgainstTenantSpecificView, tenantId, tableOrViewName, cursorIds);
+        // assert that history ids match for this tenant
         assertEquals(historyIdsPerTenant.get(tenantId).subList(startOrder, endOrder), historyIds);
         
-        cursorIds = getRecordsOutofCursorTable(dataTableName, tenantId, cursorQueryId, startOrder + 5, endOrder + 5, numRecordsThatShouldBeRetrieved);
+        // get the next batch of cursor ids out of the cursor table.
+        cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder + numRecordsThatShouldBeRetrieved, endOrder + numRecordsThatShouldBeRetrieved);
         assertEquals(numRecordsThatShouldBeRetrieved, cursorIds.length);
-        historyIds = doQueryMore(dataTableName, dataTableMultiTenant, tenantId, tenantViewName, cursorIds);
-        assertEquals(historyIdsPerTenant.get(tenantId).subList(startOrder + 5, endOrder+ 5), historyIds);
+        // now query and fetch the next batch of records.
+        historyIds = doQueryMore(queryAgainstTenantSpecificView, tenantId, tableOrViewName, cursorIds);
+        // assert that the history ids match for this tenant
+        assertEquals(historyIdsPerTenant.get(tenantId).subList(startOrder + numRecordsThatShouldBeRetrieved, endOrder+ numRecordsThatShouldBeRetrieved), historyIds);
+        
+         // get the next batch of cursor ids out of the cursor table.
+        cursorIds = getRecordsOutofCursorTable(tableOrViewName, queryAgainstTenantSpecificView, tenantId, cursorQueryId, startOrder + 2 * numRecordsThatShouldBeRetrieved, endOrder + 2 * numRecordsThatShouldBeRetrieved);
+        // assert that there are no more cursorids left for this tenant.
+        assertEquals(0, cursorIds.length);
     }
     
     private Map<String, List<String>> createHistoryTableRows(String dataTableName, String[] tenantIds, int numRowsPerTenant) throws Exception {
@@ -133,7 +165,7 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         try {
             PreparedStatement stmt = conn.prepareStatement(upsertDML);
             for (int j = 0; j < tenantIds.length; j++) {
-                List<String> parentIds = new ArrayList<String>();
+                List<String> historyIds = new ArrayList<String>();
                 for (int i = 0; i < numRowsPerTenant; i++) {
                     stmt.setString(1, tenantIds[j]);
                     String parentId = "parentId" + i;
@@ -145,9 +177,9 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
                     stmt.setString(6, "oldval");
                     stmt.setString(7, "newval");
                     stmt.executeUpdate();
-                    parentIds.add(historyId);
+                    historyIds.add(historyId);
                 }
-                historyIdsForTenant.put(tenantIds[j], parentIds);
+                historyIdsForTenant.put(tenantIds[j], historyIds);
             }
             conn.commit();
             return historyIdsForTenant;
@@ -156,29 +188,29 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         }
     }
     
-    private int upsertSelectRecordsInCursorTableForTenant(String baseTableName, boolean dataTableMultiTenant, String tenantId, String tenantViewName, String cursorQueryId) throws Exception {
+    private int upsertSelectRecordsInCursorTableForTenant(String tableOrViewName, boolean queryAgainstTenantView, String tenantId, String cursorQueryId) throws Exception {
         String sequenceName = "\"" + tenantId + "_SEQ\"";
-        Connection conn = dataTableMultiTenant ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl());
+        Connection conn = queryAgainstTenantView ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl());
         
         // Create a sequence. This sequence is used to fill cursor_order column for each row inserted in the cursor table.
         conn.createStatement().execute("CREATE SEQUENCE " + sequenceName + " CACHE " + Long.MAX_VALUE);
         conn.setAutoCommit(true);
-        if (dataTableMultiTenant) {
-            createTenantSpecificViewIfNecessary(baseTableName, tenantViewName, conn);
+        if (queryAgainstTenantView) {
+            createTenantSpecificViewIfNecessary(tableOrViewName, conn);
         }
         try {
-            String tableName = dataTableMultiTenant ? tenantViewName : baseTableName;
-            String tenantIdFilter = dataTableMultiTenant ? "" : " WHERE TENANT_ID = ? ";
+            String tenantIdFilter = queryAgainstTenantView ? "" : " WHERE TENANT_ID = ? ";
             
             // Using dynamic columns, we can use the same cursor table for storing primary keys for all the tables.  
             String upsertSelectDML = "UPSERT INTO CURSOR_TABLE " +
                                      "(TENANT_ID, QUERY_ID, CURSOR_ORDER, PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15)) " + 
                                      "SELECT ?, ?, NEXT VALUE FOR " + sequenceName + ", PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID " +
-                                     " FROM " + tableName + tenantIdFilter;
+                                     " FROM " + tableOrViewName + tenantIdFilter;
+            
             PreparedStatement stmt = conn.prepareStatement(upsertSelectDML);
             stmt.setString(1, tenantId);
             stmt.setString(2, cursorQueryId);
-            if (!dataTableMultiTenant)  {
+            if (!queryAgainstTenantView)  {
                 stmt.setString(3, tenantId);
             }
             int numRecords = stmt.executeUpdate();
@@ -198,18 +230,19 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         return DriverManager.getConnection(getUrl(), props);
     }
     
-    private String createTenantSpecificViewIfNecessary(String baseTableName, String tenantViewName, Connection tenantConn) throws Exception {
-        tenantConn.createStatement().execute("CREATE VIEW IF NOT EXISTS " + tenantViewName + " AS SELECT * FROM " + baseTableName);
+    private String createTenantSpecificViewIfNecessary(String tenantViewName, Connection tenantConn) throws Exception {
+        tenantConn.createStatement().execute("CREATE VIEW IF NOT EXISTS " + tenantViewName + " AS SELECT * FROM " + dataTableName);
         return tenantViewName;
     }
     
-    private String[] getRecordsOutofCursorTable(String dataTableName, String tenantId, String cursorQueryId, int startOrder, int endOrder, int numRecordsThatShouldBeRetrieved) throws Exception {
+    private String[] getRecordsOutofCursorTable(String tableOrViewName, boolean queryAgainstTenantSpecificView, String tenantId, String cursorQueryId, int startOrder, int endOrder) throws Exception {
         Connection conn = DriverManager.getConnection(getUrl());
-        List<String> pkIds = Lists.newArrayListWithCapacity(numRecordsThatShouldBeRetrieved);
-
-        String selectCursorSql = "SELECT TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID " +
+        List<String> pkIds = new ArrayList<String>();
+        String cols = queryAgainstTenantSpecificView ? "PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID" : "TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID";
+        String dynCols = queryAgainstTenantSpecificView ? "(PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15))" : "(TENANT_ID CHAR(15), PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15))";
+        String selectCursorSql = "SELECT " + cols + " " +
                 "FROM CURSOR_TABLE \n" +
-                "(TENANT_ID CHAR(15), PARENT_ID CHAR(15), CREATED_DATE DATE, ENTITY_HISTORY_ID CHAR(15)) \n" + 
+                 dynCols +   " \n" + 
                 "WHERE TENANT_ID = ? AND \n" +  
                 "QUERY_ID = ? AND \n" + 
                 "CURSOR_ORDER > ? AND \n" + 
@@ -222,33 +255,34 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         stmt.setInt(4, endOrder);
 
         ResultSet rs = stmt.executeQuery();
+        @SuppressWarnings("unchecked")
+        List<Pair<String, String>> columns = queryAgainstTenantSpecificView ? Lists.newArrayList(new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")) : Lists.newArrayList(new Pair<String, String>(null, "TENANT_ID"), new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID"));
         while(rs.next()) {
-            Object[] values = new Object[4];
-            for (int i = 0; i < 4; i++) {
+            Object[] values = new Object[columns.size()];
+            for (int i = 0; i < columns.size(); i++) {
                 values[i] = rs.getObject(i + 1);
             }
-            pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodePK(conn, dataTableName, values)));
+            conn = getTenantSpecificConnection(tenantId);
+            pkIds.add(Base64.encodeBytes(PhoenixRuntime.encodeValues(conn, tableOrViewName, values, columns)));
         }
         return pkIds.toArray(new String[pkIds.size()]);
     }
     
-    private List<String> doQueryMore(String dataTableName, boolean dataTableMultiTenant, String tenantId, String tenantViewName, String[] cursorIds) throws Exception {
-        Connection tenantConn = dataTableMultiTenant ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl());
-        String tableName = dataTableMultiTenant ? tenantViewName : dataTableName;
+    private List<String> doQueryMore(boolean queryAgainstTenantView, String tenantId, String tenantViewName, String[] cursorIds) throws Exception {
+        Connection conn = queryAgainstTenantView ? getTenantSpecificConnection(tenantId) : DriverManager.getConnection(getUrl());
+        String tableName = queryAgainstTenantView ? tenantViewName : dataTableName;
+        @SuppressWarnings("unchecked")
+        List<Pair<String, String>> columns = queryAgainstTenantView ? Lists.newArrayList(new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID")) : Lists.newArrayList(new Pair<String, String>(null, "TENANT_ID"), new Pair<String, String>(null, "PARENT_ID"), new Pair<String, String>(null, "CREATED_DATE"), new Pair<String, String>(null, "ENTITY_HISTORY_ID"));
         StringBuilder sb = new StringBuilder();
-        String where = dataTableMultiTenant ? " WHERE (PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN " : " WHERE (TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN ";
+        String where = queryAgainstTenantView ? " WHERE (PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN " : " WHERE (TENANT_ID, PARENT_ID, CREATED_DATE, ENTITY_HISTORY_ID) IN ";
         sb.append("SELECT ENTITY_HISTORY_ID FROM " + tableName +  where);
-        int numPkCols = dataTableMultiTenant ? 3 : 4;
+        int numPkCols = columns.size();
         String query = addRvcInBinds(sb, cursorIds.length, numPkCols);
-        PreparedStatement stmt = tenantConn.prepareStatement(query);
+        PreparedStatement stmt = conn.prepareStatement(query);
         int bindCounter = 1;
         for (int i = 0; i < cursorIds.length; i++) {
-            Connection globalConn = DriverManager.getConnection(getUrl());
-            Object[] pkParts = PhoenixRuntime.decodePK(globalConn, dataTableName, Base64.decode(cursorIds[i]));
-            globalConn.close();
-            //start at index 1 to  ignore organizationId.
-            int offset = dataTableMultiTenant ? 1 : 0;
-            for (int j = offset; j < pkParts.length; j++) {
+            Object[] pkParts = PhoenixRuntime.decodeValues(conn, tableName, Base64.decode(cursorIds[i]), columns);
+            for (int j = 0; j < pkParts.length; j++) {
                 stmt.setObject(bindCounter++, pkParts[j]);
             }
         }
@@ -281,5 +315,4 @@ public class QueryMoreIT extends BaseHBaseManagedTimeIT {
         sb.append(")");
         return sb.toString();
     }
-    
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/it/java/org/apache/phoenix/end2end/StddevIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StddevIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StddevIT.java
index b4384f6..f3fef4b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/StddevIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/StddevIT.java
@@ -17,7 +17,6 @@
  */
 package org.apache.phoenix.end2end;
 
-import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -28,27 +27,20 @@ import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
-import java.util.Properties;
 
-import org.apache.phoenix.util.PhoenixRuntime;
-import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-@Category(ClientManagedTimeTest.class)
-public class StddevIT extends BaseClientManagedTimeIT {
+@Category(HBaseManagedTimeTest.class)
+public class StddevIT extends BaseHBaseManagedTimeIT {
 
     @Test
     public void testSTDDEV_POP() throws Exception {
-        long ts = nextTimestamp();
         String tenantId = getOrganizationId();
-        initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
+        initATableValues(tenantId, getDefaultSplits(tenantId), getUrl());
 
         String query = "SELECT STDDEV_POP(A_INTEGER) FROM aTable";
 
-        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2)); // Execute at
-                                                                                     // timestamp 2
-        Connection conn = DriverManager.getConnection(getUrl(), props);
+        Connection conn = DriverManager.getConnection(getUrl());
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();
@@ -64,16 +56,12 @@ public class StddevIT extends BaseClientManagedTimeIT {
     
     @Test
     public void testSTDDEV_SAMP() throws Exception {
-        long ts = nextTimestamp();
         String tenantId = getOrganizationId();
-        initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
+        initATableValues(tenantId, getDefaultSplits(tenantId), getUrl());
 
         String query = "SELECT STDDEV_SAMP(x_decimal) FROM aTable";
 
-        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2)); // Execute at
-                                                                                     // timestamp 2
-        Connection conn = DriverManager.getConnection(getUrl(), props);
+        Connection conn = DriverManager.getConnection(getUrl());
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();
@@ -89,16 +77,12 @@ public class StddevIT extends BaseClientManagedTimeIT {
 
     @Test
     public void testSTDDEV_POPOnDecimalColType() throws Exception {
-        long ts = nextTimestamp();
         String tenantId = getOrganizationId();
-        initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
+        initATableValues(tenantId, getDefaultSplits(tenantId), getUrl());
 
         String query = "SELECT STDDEV_POP(x_decimal) FROM aTable";
 
-        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2)); // Execute at
-                                                                                     // timestamp 2
-        Connection conn = DriverManager.getConnection(getUrl(), props);
+        Connection conn = DriverManager.getConnection(getUrl());
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();
@@ -114,16 +98,12 @@ public class StddevIT extends BaseClientManagedTimeIT {
 
     @Test
     public void testSTDDEV_SAMPOnDecimalColType() throws Exception {
-        long ts = nextTimestamp();
         String tenantId = getOrganizationId();
-        initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
+        initATableValues(tenantId, getDefaultSplits(tenantId), getUrl());
 
         String query = "SELECT STDDEV_SAMP(x_decimal) FROM aTable";
 
-        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
-        props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 2)); // Execute at
-                                                                                     // timestamp 2
-        Connection conn = DriverManager.getConnection(getUrl(), props);
+        Connection conn = DriverManager.getConnection(getUrl());
         try {
             PreparedStatement statement = conn.prepareStatement(query);
             ResultSet rs = statement.executeQuery();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/it/java/org/apache/phoenix/trace/BaseTracingTestIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/trace/BaseTracingTestIT.java b/phoenix-core/src/it/java/org/apache/phoenix/trace/BaseTracingTestIT.java
index 92b2250..0f8a666 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/trace/BaseTracingTestIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/trace/BaseTracingTestIT.java
@@ -30,6 +30,7 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.end2end.HBaseManagedTimeTest;
 import org.apache.phoenix.metrics.MetricInfo;
 import org.apache.phoenix.metrics.Metrics;
 import org.apache.phoenix.metrics.PhoenixAbstractMetric;
@@ -42,11 +43,13 @@ import org.apache.phoenix.trace.util.Tracing.Frequency;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Before;
+import org.junit.experimental.categories.Category;
 
 /**
  * Base test for tracing tests - helps manage getting tracing/non-tracing
  * connections, as well as any supporting utils.
  */
+@Category(HBaseManagedTimeTest.class)
 public class BaseTracingTestIT extends BaseHBaseManagedTimeIT {
     private static final Log LOG = LogFactory.getLog(BaseTracingTestIT.class);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/main/java/org/apache/phoenix/schema/KeyValueSchema.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/KeyValueSchema.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/KeyValueSchema.java
index b668f5f..d6c36c0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/KeyValueSchema.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/KeyValueSchema.java
@@ -22,7 +22,6 @@ import java.util.List;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.http.annotation.Immutable;
-
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.schema.tuple.Tuple;
 import org.apache.phoenix.util.ByteUtil;
@@ -65,7 +64,7 @@ public class KeyValueSchema extends ValueSchema {
         }
         
         public KeyValueSchemaBuilder addField(PDatum datum) {
-            super.addField(datum, fields.size() <  this.minNullable, SortOrder.getDefault());
+            super.addField(datum, fields.size() >=  this.minNullable, SortOrder.getDefault());
             return this;
         }
     }
@@ -107,7 +106,7 @@ public class KeyValueSchema extends ValueSchema {
             Field field = fields.get(i);
             PDataType type = field.getDataType();
             for (int j = 0; j < field.getCount(); j++) {
-                if (expressions[index].evaluate(tuple, ptr)) { // Skip null values
+                if (expressions[index].evaluate(tuple, ptr) && ptr.getLength() > 0) { // Skip null values
                     if (index >= minNullableIndex) {
                         valueSet.set(index - minNullableIndex);
                     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
index 614eb6a..fa588b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PDataType.java
@@ -3402,7 +3402,7 @@ public enum PDataType {
             return VARBINARY.getSampleValue(maxLength, arrayLength);
         }
     },
-    INTEGER_ARRAY("INTEGER_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.INTEGER.getSqlType(), PhoenixArray.class, null) {
+    INTEGER_ARRAY("INTEGER ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.INTEGER.getSqlType(), PhoenixArray.class, null) {
     	@Override
     	public boolean isArrayType() {
     		return true;
@@ -3491,7 +3491,7 @@ public enum PDataType {
         }
 		
 	},
-    BOOLEAN_ARRAY("BOOLEAN_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.BOOLEAN.getSqlType(), PhoenixArray.class, null) {
+	BOOLEAN_ARRAY("BOOLEAN ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.BOOLEAN.getSqlType(), PhoenixArray.class, null) {
     	@Override
     	public boolean isArrayType() {
     		return true;
@@ -3579,7 +3579,7 @@ public enum PDataType {
         }
 		
 	},
-	VARCHAR_ARRAY("VARCHAR_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.VARCHAR.getSqlType(), PhoenixArray.class, null) {
+	VARCHAR_ARRAY("VARCHAR ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.VARCHAR.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -3673,7 +3673,7 @@ public enum PDataType {
         }
 		
 	},
-	VARBINARY_ARRAY("VARBINARY_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.VARBINARY.getSqlType(), PhoenixArray.class, null) {
+	VARBINARY_ARRAY("VARBINARY ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.VARBINARY.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -3767,7 +3767,7 @@ public enum PDataType {
             return pDataTypeForArray.getSampleValue(PDataType.VARBINARY, arrayLength, maxLength);
         }
 	},
-	BINARY_ARRAY("BINARY_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.BINARY.getSqlType(), PhoenixArray.class, null) {
+	BINARY_ARRAY("BINARY ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.BINARY.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -3861,7 +3861,7 @@ public enum PDataType {
             return pDataTypeForArray.getSampleValue(PDataType.BINARY, arrayLength, maxLength);
         }
     },
-	CHAR_ARRAY("CHAR_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.CHAR.getSqlType(), PhoenixArray.class, null) {
+    CHAR_ARRAY("CHAR ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.CHAR.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -3956,7 +3956,7 @@ public enum PDataType {
         }
 		
 	},
-	LONG_ARRAY("LONG_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.LONG.getSqlType(), PhoenixArray.class, null) {
+	LONG_ARRAY("BIGINT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.LONG.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4043,7 +4043,7 @@ public enum PDataType {
         }
 		
 	},
-	SMALLINT_ARRAY("SMALLINT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.SMALLINT.getSqlType(), PhoenixArray.class, null) {
+	SMALLINT_ARRAY("SMALLINT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.SMALLINT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4130,7 +4130,7 @@ public enum PDataType {
         }
 		
 	},
-	TINYINT_ARRAY("TINYINT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TINYINT.getSqlType(), PhoenixArray.class, null) {
+	TINYINT_ARRAY("TINYINT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TINYINT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4217,7 +4217,7 @@ public enum PDataType {
         }
 		
 	},
-	FLOAT_ARRAY("FLOAT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.FLOAT.getSqlType(), PhoenixArray.class, null) {
+	FLOAT_ARRAY("FLOAT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.FLOAT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4305,7 +4305,7 @@ public enum PDataType {
         }
 		
 	},
-	DOUBLE_ARRAY("DOUBLE_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DOUBLE.getSqlType(), PhoenixArray.class, null) {
+	DOUBLE_ARRAY("DOUBLE ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DOUBLE.getSqlType(), PhoenixArray.class, null) {
 	    final PArrayDataType pDataTypeForArray = new PArrayDataType();
 		@Override
     	public boolean isArrayType() {
@@ -4394,7 +4394,7 @@ public enum PDataType {
 
 	},
 	
-	DECIMAL_ARRAY("DECIMAL_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DECIMAL.getSqlType(), PhoenixArray.class, null) {
+	DECIMAL_ARRAY("DECIMAL ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DECIMAL.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4489,8 +4489,7 @@ public enum PDataType {
         }
 	
 	},
-	TIMESTAMP_ARRAY("TIMESTAMP_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TIMESTAMP.getSqlType(), PhoenixArray.class,
-			null) {
+	TIMESTAMP_ARRAY("TIMESTAMP ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TIMESTAMP.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4577,8 +4576,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_TIMESTAMP_ARRAY("UNSIGNED_TIMESTAMP_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TIMESTAMP.getSqlType(), PhoenixArray.class,
-			null) {
+	UNSIGNED_TIMESTAMP_ARRAY("UNSIGNED_TIMESTAMP ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TIMESTAMP.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4665,7 +4663,7 @@ public enum PDataType {
         }
 
 	},
-	TIME_ARRAY("TIME_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TIME.getSqlType(), PhoenixArray.class, null) {
+	TIME_ARRAY("TIME ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.TIME.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4752,7 +4750,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_TIME_ARRAY("UNSIGNED_TIME_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TIME.getSqlType(), PhoenixArray.class, null) {
+	UNSIGNED_TIME_ARRAY("UNSIGNED_TIME ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TIME.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4839,7 +4837,7 @@ public enum PDataType {
         }
 
 	},
-	DATE_ARRAY("DATE_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DATE.getSqlType(), PhoenixArray.class, null) {
+	DATE_ARRAY("DATE ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.DATE.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -4926,7 +4924,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_DATE_ARRAY("UNSIGNED_DATE_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_DATE.getSqlType(), PhoenixArray.class, null) {
+	UNSIGNED_DATE_ARRAY("UNSIGNED_DATE ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_DATE.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5013,7 +5011,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_LONG_ARRAY("UNSIGNED_LONG_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_LONG.getSqlType(), PhoenixArray.class, null) {
+	UNSIGNED_LONG_ARRAY("UNSIGNED_LONG ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_LONG.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5100,7 +5098,7 @@ public enum PDataType {
         }
 	
 	},
-	UNSIGNED_INT_ARRAY("UNSIGNED_INT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_INT.getSqlType(), PhoenixArray.class, null) {
+	UNSIGNED_INT_ARRAY("UNSIGNED_INT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_INT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5187,8 +5185,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_SMALLINT_ARRAY("UNSIGNED_SMALLINT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_SMALLINT.getSqlType(),
-			PhoenixArray.class, null) {
+	UNSIGNED_SMALLINT_ARRAY("UNSIGNED_SMALLINT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_SMALLINT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5275,8 +5272,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_TINYINT_ARRAY("UNSIGNED_TINYINT__ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TINYINT.getSqlType(), PhoenixArray.class,
-			null) {
+	UNSIGNED_TINYINT_ARRAY("UNSIGNED_TINYINT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_TINYINT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5362,7 +5358,7 @@ public enum PDataType {
             return pDataTypeForArray.getSampleValue(PDataType.UNSIGNED_TINYINT, arrayLength, maxLength);
         }
 	},
-	UNSIGNED_FLOAT_ARRAY("UNSIGNED_FLOAT_ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_FLOAT.getSqlType(), PhoenixArray.class, null) {
+	UNSIGNED_FLOAT_ARRAY("UNSIGNED_FLOAT ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_FLOAT.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5449,8 +5445,7 @@ public enum PDataType {
         }
 
 	},
-	UNSIGNED_DOUBLE_ARRAY("UNSIGNED_DOUBLE__ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_DOUBLE.getSqlType(), PhoenixArray.class,
-			null) {
+	UNSIGNED_DOUBLE_ARRAY("UNSIGNED_DOUBLE ARRAY", PDataType.ARRAY_TYPE_BASE + PDataType.UNSIGNED_DOUBLE.getSqlType(), PhoenixArray.class, null) {
 		@Override
     	public boolean isArrayType() {
     		return true;
@@ -5551,6 +5546,28 @@ public enum PDataType {
     private final PDataCodec codec;
     final PArrayDataType pDataTypeForArray = new PArrayDataType();
     
+    private static final int SQL_TYPE_OFFSET;
+    private static final PDataType[] SQL_TYPE_TO_PCOLUMN_DATA_TYPE;
+    static {
+        int minSqlType = Integer.MAX_VALUE;
+        int maxSqlType = Integer.MIN_VALUE;
+        for (PDataType dataType : PDataType.values()) {
+            int sqlType = dataType.getSqlType();
+            if (sqlType < minSqlType) {
+                minSqlType = sqlType;
+            }
+            if (sqlType > maxSqlType) {
+                maxSqlType = sqlType;
+            }
+        }
+        SQL_TYPE_OFFSET = minSqlType;
+        SQL_TYPE_TO_PCOLUMN_DATA_TYPE = new PDataType[maxSqlType-minSqlType+1];
+        for (PDataType dataType : PDataType.values()) {
+            int sqlType = dataType.getSqlType();
+            SQL_TYPE_TO_PCOLUMN_DATA_TYPE[sqlType-SQL_TYPE_OFFSET] = dataType;
+        }
+    }
+    
     private PDataType(String sqlTypeName, int sqlType, Class clazz, PDataCodec codec) {
         this.sqlTypeName = sqlTypeName;
         this.sqlType = sqlType;
@@ -5559,7 +5576,7 @@ public enum PDataType {
         this.sqlTypeNameBytes = Bytes.toBytes(sqlTypeName);
         this.codec = codec;
     }
-
+    
     public boolean isCastableTo(PDataType targetType) {
         return isComparableTo(targetType);
     }
@@ -6767,6 +6784,7 @@ public enum PDataType {
     public final static Integer BYTE_PRECISION = 3;
 
     public static final int ARRAY_TYPE_BASE = 3000;
+    public static final String ARRAY_TYPE_SUFFIX = "ARRAY";
     
     private static final ThreadLocal<Random> RANDOM = new ThreadLocal<Random>(){
         @Override 
@@ -7232,30 +7250,7 @@ public enum PDataType {
     	return fromSqlTypeName.getSqlType() + PDataType.ARRAY_TYPE_BASE;
     }
 
-    private static final int SQL_TYPE_OFFSET;
-    private static final PDataType[] SQL_TYPE_TO_PCOLUMN_DATA_TYPE;
-    static {
-        int minSqlType = Integer.MAX_VALUE;
-        int maxSqlType = Integer.MIN_VALUE;
-        for (PDataType dataType : PDataType.values()) {
-            int sqlType = dataType.getSqlType();
-            if (sqlType < minSqlType) {
-                minSqlType = sqlType;
-            }
-            if (sqlType > maxSqlType) {
-                maxSqlType = sqlType;
-            }
-        }
-        SQL_TYPE_OFFSET = minSqlType;
-        SQL_TYPE_TO_PCOLUMN_DATA_TYPE = new PDataType[maxSqlType-minSqlType+1];
-        for (PDataType dataType : PDataType.values()) {
-            int sqlType = dataType.getSqlType();
-            SQL_TYPE_TO_PCOLUMN_DATA_TYPE[sqlType-SQL_TYPE_OFFSET] = dataType;
-        }
-    }
-
-         
-	private static interface PhoenixArrayFactory {
+    private static interface PhoenixArrayFactory {
 		PhoenixArray newArray(PDataType type, Object[] elements);
 	}
 
@@ -7402,4 +7397,9 @@ public enum PDataType {
     public void pad(ImmutableBytesWritable ptr, Integer maxLength) {
     }
     
+    public static PDataType arrayBaseType(PDataType arrayType) {
+        Preconditions.checkArgument(arrayType.isArrayType(), "Not a phoenix array type");
+        return fromTypeId(arrayType.getSqlType() - ARRAY_TYPE_BASE);
+    }
+    
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index 64e3230..9aa1b83 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -21,6 +21,7 @@ import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -49,6 +50,7 @@ import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
@@ -58,6 +60,8 @@ import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
 import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableKey;
+import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.tuple.ResultTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
 
@@ -154,6 +158,25 @@ public class IndexUtil {
             throw new IllegalArgumentException("Could not find column \"" +  indexColumnName.substring(pos+1) + "\" in index column name of \"" + indexColumnName + "\"", e);
         }
     }
+    
+    /**
+     * Returns a list of {@code PColumn} for the associated data table columns given the corresponding index columns. For a tenant
+     * specific view, the connection needs to be tenant specific too. 
+     * @param dataTableName fully qualified name of the data table
+     * @param indexColumns index columns to map back to their data table columns
+     * @param conn phoenix connection
+     * @return list of data table columns corresponding to {@code indexColumns}
+     * @throws TableNotFoundException if the table cannot be found in the connection's metadata cache
+     */
+    public static List<PColumn> getDataColumns(String dataTableName, List<PColumn> indexColumns, PhoenixConnection conn) throws SQLException {
+        PTable dataTable = PhoenixRuntime.getTable(conn, dataTableName);
+        List<PColumn> dataColumns = new ArrayList<PColumn>(indexColumns.size());
+        for (PColumn indexColumn : indexColumns) {
+            dataColumns.add(getDataColumn(dataTable, indexColumn.getName().getString()));
+        }
+        return dataColumns;
+    }
+    
 
     private static boolean isEmptyKeyValue(PTable table, ColumnReference ref) {
         byte[] emptyKeyValueCF = SchemaUtil.getEmptyColumnFamily(table);
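
A minimal usage sketch (not part of the commit) for the new getDataColumns() helper. The connection URL
"jdbc:phoenix:localhost", the data table MY_TABLE and its global index MY_INDEX are hypothetical, and the
table is assumed to be neither salted nor tenant-specific:

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.List;

import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.PhoenixRuntime;

public class DataColumnsExample {
    public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        try {
            PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
            // Resolve the index table from the connection's metadata cache and
            // map its PK columns back to the columns of the data table.
            PTable indexTable = PhoenixRuntime.getTable(conn, "MY_INDEX");
            List<PColumn> indexPkCols = indexTable.getPKColumns();
            List<PColumn> dataPkCols = IndexUtil.getDataColumns("MY_TABLE", indexPkCols, pconn);
            for (PColumn col : dataPkCols) {
                System.out.println(col.getName().getString());
            }
        } finally {
            conn.close();
        }
    }
}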

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
index 492e940..0c82543 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/PhoenixRuntime.java
@@ -17,12 +17,16 @@
  */
 package org.apache.phoenix.util;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.phoenix.schema.PDataType.ARRAY_TYPE_SUFFIX;
+
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.Reader;
 import java.sql.Connection;
 import java.sql.DriverManager;
+import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -33,6 +37,9 @@ import java.util.Set;
 import java.util.StringTokenizer;
 import java.util.TreeSet;
 
+import javax.annotation.Nullable;
+
+import com.google.common.base.Preconditions;
 import com.google.common.base.Splitter;
 import com.google.common.collect.ImmutableList;
 import org.apache.commons.cli.CommandLine;
@@ -47,20 +54,29 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.LiteralExpression;
+import org.apache.phoenix.expression.OrderByExpression;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixPreparedStatement;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
+import org.apache.phoenix.schema.KeyValueSchema;
+import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
 import org.apache.phoenix.schema.MetaDataClient;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
 import org.apache.phoenix.schema.PDataType;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTableKey;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeySchema;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.schema.ValueBitSet;
 
 import com.google.common.collect.Lists;
 
@@ -277,6 +293,7 @@ public class PhoenixRuntime {
         PTable table = null;
         PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
         try {
+        	name = SchemaUtil.normalizeIdentifier(name);
             table = pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), name));
         } catch (TableNotFoundException e) {
             String schemaName = SchemaUtil.getSchemaNameFromFullName(name);
@@ -397,83 +414,6 @@ public class PhoenixRuntime {
     }
 
     /**
-     * Encode the primary key values from the table as a byte array. The values must
-     * be in the same order as the primary key constraint. If the connection and
-     * table are both tenant-specific, the tenant ID column must not be present in
-     * the values.
-     * @param conn an open connection
-     * @param fullTableName the full table name
-     * @param values the values of the primary key columns ordered in the same order
-     *  as the primary key constraint
-     * @return the encoded byte array
-     * @throws SQLException if the table cannot be found or the incorrect number of
-     *  of values are provided
-     * @see #decodePK(Connection, String, byte[]) to decode the byte[] back to the
-     *  values
-     */
-    public static byte[] encodePK(Connection conn, String fullTableName, Object[] values) throws SQLException {
-        PTable table = getTable(conn, fullTableName);
-        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
-        int offset = (table.getBucketNum() == null ? 0 : 1) + (table.isMultiTenant() && pconn.getTenantId() != null ? 1 : 0);
-        List<PColumn> pkColumns = table.getPKColumns();
-        if (pkColumns.size() - offset != values.length) {
-            throw new SQLException("Expected " + (pkColumns.size() - offset) + " but got " + values.length);
-        }
-        PDataType type = null;
-        TrustedByteArrayOutputStream output = new TrustedByteArrayOutputStream(table.getRowKeySchema().getEstimatedValueLength());
-        try {
-            for (int i = offset; i < pkColumns.size(); i++) {
-                if (type != null && !type.isFixedWidth()) {
-                    output.write(QueryConstants.SEPARATOR_BYTE);
-                }
-                type = pkColumns.get(i).getDataType();
-
-                //for fixed width data types like CHAR and BINARY, we need to pad values to be of max length.
-                Object paddedObj = type.pad(values[i - offset], pkColumns.get(i).getMaxLength());
-                byte[] value = type.toBytes(paddedObj);
-                output.write(value);
-            }
-            return output.toByteArray();
-        } finally {
-            try {
-                output.close();
-            } catch (IOException e) {
-                throw new RuntimeException(e); // Impossible
-            }
-        }
-    }
-
-    /**
-     * Decode a byte array value back into the Object values of the
-     * primary key constraint. If the connection and table are both
-     * tenant-specific, the tenant ID column is not expected to have
-     * been encoded and will not appear in the returned values.
-     * @param conn an open connection
-     * @param name the full table name
-     * @param encodedValue the value that was encoded with {@link #encodePK(Connection, String, Object[])}
-     * @return the Object values encoded in the byte array value
-     * @throws SQLException
-     */
-    public static Object[] decodePK(Connection conn, String name, byte[] value) throws SQLException {
-        PTable table = getTable(conn, name);
-        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
-        int offset = (table.getBucketNum() == null ? 0 : 1) + (table.isMultiTenant() && pconn.getTenantId() != null ? 1 : 0);
-        int nValues = table.getPKColumns().size() - offset;
-        RowKeySchema schema = table.getRowKeySchema();
-        Object[] values = new Object[nValues];
-        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
-        schema.iterator(value, ptr);
-        int i = 0;
-        int fieldIdx = offset;
-        while (i < nValues && schema.next(ptr, fieldIdx, value.length) != null) {
-            values[i] = schema.getField(fieldIdx).getDataType().toObject(ptr);
-            i++;
-            fieldIdx++;
-        }
-        return values;
-    }
-
-    /**
      * Represents the parsed commandline parameters defining the command or commands to be
      * executed.
      */
@@ -649,4 +589,262 @@ public class PhoenixRuntime {
             return strict;
         }
     }
+    
+    /**
+     * Returns the optimized query plan used by Phoenix for executing the SQL.
+     * @param stmt statement to return the plan for
+     * @throws SQLException
+     */
+    public static QueryPlan getOptimizedQueryPlan(PreparedStatement stmt) throws SQLException {
+        checkNotNull(stmt);
+        QueryPlan plan = stmt.unwrap(PhoenixPreparedStatement.class).optimizeQuery();
+        return plan;
+    }
+    
+    /**
+     * Returns whether or not the query plan has any ORDER BY expressions.
+     * @param plan query plan to check
+     * @return true if the plan contains at least one ORDER BY expression
+     */
+    public static boolean hasOrderBy(QueryPlan plan) {
+        checkNotNull(plan);
+        List<OrderByExpression> orderBys = plan.getOrderBy().getOrderByExpressions();
+        return orderBys != null && !orderBys.isEmpty(); 
+    }
+    
+    public static int getLimit(QueryPlan plan) {
+        checkNotNull(plan);
+        return plan.getLimit() == null ? 0 : plan.getLimit();
+    }
+    
+    private static String addQuotes(String str) {
+        return str == null ? str : "\"" + str + "\"";
+    }
+    /**
+     *
+     * @param columns - Initialized empty list to be filled with pairs of column family name and column name for the columns used 
+     * in the row key of the query plan. Column family names are optional, so the first part of each pair is nullable.
+     * Column names and family names are enclosed in double quotes to allow for case sensitivity and special characters.
+     * The salting column and the view index id column are not included. If the connection is tenant-specific 
+     * and the table used by the query plan is multi-tenant, the tenant id column is not included either.
+     * @param plan - query plan to get info for.
+     * @param conn - connection used to generate the query plan. Caller should take care of closing the connection appropriately.
+     * @param forDataTable - if true, then family names and column names correspond to the data table even if the query plan uses
+     * the secondary index table. If false, and if the query plan uses the secondary index table, then the family names and column 
+     * names correspond to the index table.
+     * @throws SQLException
+     */
+    public static void getPkColsForSql(List<Pair<String, String>> columns, QueryPlan plan, Connection conn, boolean forDataTable) throws SQLException {
+        checkNotNull(columns);
+        checkNotNull(plan);
+        checkNotNull(conn);
+        List<PColumn> pkColumns = getPkColumns(plan.getTableRef().getTable(), conn, forDataTable);
+        String columnName;
+        String familyName;
+        for (PColumn pCol : pkColumns ) {
+            columnName = addQuotes(pCol.getName().getString());
+            familyName = pCol.getFamilyName() != null ? addQuotes(pCol.getFamilyName().getString()) : null;
+            columns.add(new Pair<String, String>(familyName, columnName));
+        }
+    }
+
+    /**
+     * @param columns - Initialized empty list to be filled with pairs of column family name and column name for the columns used 
+     * in the row key of the query plan. Column family names are optional, so the first part of each pair is nullable.
+     * Column names and family names are enclosed in double quotes to allow for case sensitivity and special characters.
+     * The salting column and the view index id column are not included. If the connection is tenant-specific 
+     * and the table used by the query plan is multi-tenant, the tenant id column is not included either.
+     * @param dataTypes - Initialized empty list to be filled with the corresponding data types for the columns in {@code columns}. 
+     * @param plan - query plan to get info for
+     * @param conn - phoenix connection used to generate the query plan. Caller should take care of closing the connection appropriately.
+     * @param forDataTable - if true, then column names and data types correspond to the data table even if the query plan uses
+     * the secondary index table. If false, and if the query plan uses the secondary index table, then the column names and data 
+     * types correspond to the index table.
+     * @throws SQLException
+     */
+    public static void getPkColsDataTypesForSql(List<Pair<String, String>> columns, List<String> dataTypes, QueryPlan plan, Connection conn, boolean forDataTable) throws SQLException {
+        checkNotNull(columns);
+        checkNotNull(dataTypes);
+        checkNotNull(plan);
+        checkNotNull(conn);
+        List<PColumn> pkColumns = getPkColumns(plan.getTableRef().getTable(), conn, forDataTable);
+        String columnName;
+        String familyName;
+        for (PColumn pCol : pkColumns) {
+            String sqlTypeName = getSqlTypeName(pCol);
+            dataTypes.add(sqlTypeName);
+            columnName = addQuotes(pCol.getName().getString());
+            familyName = pCol.getFamilyName() != null ? addQuotes(pCol.getFamilyName().getString()) : null;
+            columns.add(new Pair<String, String>(familyName, columnName));
+        }
+    }
+    
+    /**
+     * 
+     * @param pCol
+     * @return sql type name that could be used in DDL statements, dynamic column types etc. 
+     */
+    public static String getSqlTypeName(PColumn pCol) {
+        PDataType dataType = pCol.getDataType();
+        Integer maxLength = pCol.getMaxLength();
+        Integer scale = pCol.getScale();
+        return dataType.isArrayType() ? getArraySqlTypeName(maxLength, scale, dataType) : appendMaxLengthAndScale(maxLength, scale, dataType.getSqlTypeName());
+    }
+    
+    public static String getArraySqlTypeName(@Nullable Integer maxLength, @Nullable Integer scale, PDataType arrayType) {
+        String baseTypeSqlName = PDataType.arrayBaseType(arrayType).getSqlTypeName();
+        return appendMaxLengthAndScale(maxLength, scale, baseTypeSqlName) + " " + ARRAY_TYPE_SUFFIX; // for ex - decimal(10,2) ARRAY
+    }
+
+    private static String appendMaxLengthAndScale(@Nullable Integer maxLength, @Nullable Integer scale, String sqlTypeName) {
+        if (maxLength != null) {
+             sqlTypeName = sqlTypeName + "(" + maxLength;
+             if (scale != null) {
+               sqlTypeName = sqlTypeName + "," + scale; // has both max length and scale. For ex- decimal(10,2)
+             }       
+             sqlTypeName = sqlTypeName + ")";
+        }
+        return sqlTypeName;
+    }
+    
+    private static List<PColumn> getPkColumns(PTable ptable, Connection conn, boolean forDataTable) throws SQLException {
+        PhoenixConnection pConn = conn.unwrap(PhoenixConnection.class);
+        List<PColumn> pkColumns = ptable.getPKColumns();
+        
+        // Skip the salting column and the view index id column if present.
+        // Skip the tenant id column too if the connection is tenant specific and the table used by the query plan is multi-tenant
+        int offset = (ptable.getBucketNum() == null ? 0 : 1) + (ptable.isMultiTenant() && pConn.getTenantId() != null ? 1 : 0) + (ptable.getViewIndexId() == null ? 0 : 1);
+        
+        // get a sublist of pkColumns by skipping the offset columns.
+        pkColumns = pkColumns.subList(offset, pkColumns.size());
+        
+        if (ptable.getType() == PTableType.INDEX && forDataTable) {
+            // index tables have the same schema name as their parent/data tables.
+            String fullDataTableName = ptable.getParentName().getString();
+            
+            // Get the corresponding columns of the data table.
+            List<PColumn> dataColumns = IndexUtil.getDataColumns(fullDataTableName, pkColumns, pConn);
+            pkColumns = dataColumns;
+        }
+        return pkColumns;
+    }
+    
+    /**
+     * 
+     * @param conn connection that was used for reading/generating value.
+     * @param fullTableName fully qualified table name
+     * @param values values of the columns
+     * @param columns list of pairs of column family name (first part, nullable since the family is optional) and
+     * column name (second part). Columns in the list must be in the same order as their values occur
+     * in the object array.
+     * @return values encoded in a byte array 
+     * @throws SQLException
+     * @see {@link #decodeValues(Connection, String, byte[], List)}
+     */
+    public static byte[] encodeValues(Connection conn, String fullTableName, Object[] values, List<Pair<String, String>> columns) throws SQLException {
+        PTable table = getTable(conn, fullTableName);
+        List<PColumn> pColumns = getPColumns(table, columns);
+        List<Expression> expressions = new ArrayList<Expression>(pColumns.size());
+        int i = 0;
+        for (PColumn col : pColumns) {
+            Object value = values[i];
+            // for purposes of encoding, sort order of the columns doesn't matter.
+            Expression expr = LiteralExpression.newConstant(value, col.getDataType(), col.getMaxLength(), col.getScale());
+            expressions.add(expr);
+            i++;
+        }
+        KeyValueSchema kvSchema = buildKeyValueSchema(pColumns);
+        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+        ValueBitSet valueSet = ValueBitSet.newInstance(kvSchema);
+        return kvSchema.toBytes(expressions.toArray(new Expression[0]), valueSet, ptr);
+    }
+    
+    
+    /**
+     * 
+     * @param conn connection that was used for reading/generating value.
+     * @param fullTableName fully qualified table name
+     * @param value byte value of the columns concatenated as a single byte array. @see {@link #encodeValues(Connection, String, Object[], List)}
+     * @param columns list of pairs of column family name (first part, nullable) and column name (second part)
+     * for the columns whose values are present in the byte array. The columns should be in the same order
+     * as their values appear in the byte array.
+     * @return decoded values for each column
+     * @throws SQLException
+     * 
+     */
+    public static Object[] decodeValues(Connection conn, String fullTableName, byte[] value, List<Pair<String, String>> columns) throws SQLException {
+        PTable table = getTable(conn, fullTableName);
+        KeyValueSchema kvSchema = buildKeyValueSchema(getPColumns(table, columns));
+        ImmutableBytesWritable ptr = new ImmutableBytesWritable(value);
+        ValueBitSet valueSet = ValueBitSet.newInstance(kvSchema);
+        valueSet.clear();
+        valueSet.or(ptr);
+        int maxOffset = ptr.getOffset() + ptr.getLength();
+        Boolean hasValue;
+        kvSchema.iterator(ptr);
+        int i = 0;
+        List<Object> values = new ArrayList<Object>();
+        while ((hasValue = kvSchema.next(ptr, i, maxOffset, valueSet)) != null) {
+            if (hasValue) {
+                values.add(kvSchema.getField(i).getDataType().toObject(ptr));
+            }
+            i++;
+        }
+        return values.toArray();
+    }
+    
+    private static KeyValueSchema buildKeyValueSchema(List<PColumn> columns) {
+        KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(getMinNullableIndex(columns));
+        for (PColumn col : columns) {
+            builder.addField(col);
+        }
+        return builder.build();
+    }
+    
+    private static int getMinNullableIndex(List<PColumn> columns) {
+        int minNullableIndex = columns.size();
+        for (int i = 0; i < columns.size(); i++) {
+            if (columns.get(i).isNullable()) {
+                minNullableIndex = i;
+                break;
+            }
+         }
+        return minNullableIndex;
+    }
+    
+   /**
+     * @param table table to get the {@code PColumn} for
+     * @param columns list of pairs of column family name (first part, nullable since the family is optional)
+     * and column name (second part).
+     * @return list of {@code PColumn} for the given columns
+     * @throws SQLException 
+     */
+    private static List<PColumn> getPColumns(PTable table, List<Pair<String, String>> columns) throws SQLException {
+        List<PColumn> pColumns = new ArrayList<PColumn>(columns.size());
+        for (Pair<String, String> column : columns) {
+            pColumns.add(getPColumn(table, column.getFirst(), column.getSecond()));
+        }
+        return pColumns;
+    }
+    
+    private static PColumn getPColumn(PTable table, @Nullable String familyName, String columnName) throws SQLException {
+        if (table==null) {
+            throw new SQLException("Table must not be null.");
+        }
+        if (columnName==null) {
+            throw new SQLException("columnName must not be null.");
+        }
+        // normalize and remove quotes from family and column names before looking up.
+        familyName = SchemaUtil.normalizeIdentifier(familyName);
+        columnName = SchemaUtil.normalizeIdentifier(columnName);
+        PColumn pColumn = null;
+        if (familyName != null) {
+            PColumnFamily family = table.getColumnFamily(familyName);
+            pColumn = family.getColumn(columnName);
+        } else {
+            pColumn = table.getColumn(columnName);
+        }
+        return pColumn;
+    }
+
 }
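
A usage sketch (not part of the commit) tying together the new PhoenixRuntime helpers above:
getOptimizedQueryPlan(), getPkColsDataTypesForSql() and encodeValues()/decodeValues(). The connection URL,
the table MY_TABLE and the sample PK values are hypothetical; the values must match the table's primary
key order and types:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.util.PhoenixRuntime;

public class PkColsExample {
    public static void main(String[] args) throws Exception {
        Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
        try {
            PreparedStatement stmt = conn.prepareStatement("SELECT * FROM MY_TABLE");
            // Optimized plan for the statement; it may pick a secondary index table.
            QueryPlan plan = PhoenixRuntime.getOptimizedQueryPlan(stmt);

            // Row key columns as (family, name) pairs plus their SQL type names,
            // mapped back to the data table because forDataTable is true.
            List<Pair<String, String>> columns = new ArrayList<Pair<String, String>>();
            List<String> dataTypes = new ArrayList<String>();
            PhoenixRuntime.getPkColsDataTypesForSql(columns, dataTypes, plan, conn, true);

            // Round-trip one row's PK values through the new encode/decode helpers.
            // The values must be in the same order (and of the same types) as 'columns'.
            Object[] pkValues = new Object[] { "org1", 42L };
            byte[] encoded = PhoenixRuntime.encodeValues(conn, "MY_TABLE", pkValues, columns);
            Object[] decoded = PhoenixRuntime.decodeValues(conn, "MY_TABLE", encoded, columns);
            System.out.println(decoded.length + " PK values decoded");
        } finally {
            conn.close();
        }
    }
}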

http://git-wip-us.apache.org/repos/asf/phoenix/blob/17eb70d8/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
index 309b4be..42b4e30 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/SchemaUtil.java
@@ -17,6 +17,9 @@
  */
 package org.apache.phoenix.util;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Strings.isNullOrEmpty;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_NAME_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_NAME_BYTES;
 
@@ -28,6 +31,8 @@ import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Properties;
 
+import javax.annotation.Nullable;
+
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.exception.SQLExceptionCode;
@@ -54,6 +59,7 @@ import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.ValueSchema.Field;
 
 import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 
 /**
  * 
@@ -609,4 +615,26 @@ public class SchemaUtil {
         Preconditions.checkNotNull(argument,"Argument passed cannot be null");
         return ESCAPE_CHARACTER + argument + ESCAPE_CHARACTER;
     }
+    
+    /**
+     * 
+     * @return a fully qualified column name in the format: "CFNAME"."COLNAME" or "COLNAME" depending on whether or not
+     * there is a column family name present. 
+     */
+    public static String getQuotedFullColumnName(PColumn pCol) {
+        checkNotNull(pCol);
+        String columnName = pCol.getName().getString();
+        String columnFamilyName = pCol.getFamilyName() != null ? pCol.getFamilyName().getString() : null;
+        return getQuotedFullColumnName(columnFamilyName, columnName);
+    }
+    
+    /**
+     * 
+     * @return a fully qualified column name in the format: "CFNAME"."COLNAME" or "COLNAME" depending on whether or not
+     * there is a column family name present. 
+     */
+    public static String getQuotedFullColumnName(@Nullable String columnFamilyName, String columnName) {
+        checkArgument(!isNullOrEmpty(columnName), "Column name cannot be null or empty");
+        return columnFamilyName == null ? ("\"" + columnName + "\"") : ("\"" + columnFamilyName + "\"" + QueryConstants.NAME_SEPARATOR + "\"" + columnName + "\"");
+    }
 }
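
Finally, a small sketch (not part of the commit) of the new SchemaUtil.getQuotedFullColumnName() overload;
the family and column names are illustrative:

import org.apache.phoenix.util.SchemaUtil;

public class QuotedColumnNameExample {
    public static void main(String[] args) {
        // Without a column family only the column name is quoted.
        System.out.println(SchemaUtil.getQuotedFullColumnName(null, "myCol"));  // "myCol"
        // With a family the result is "CF"."COL".
        System.out.println(SchemaUtil.getQuotedFullColumnName("cf1", "myCol")); // "cf1"."myCol"
    }
}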