Posted to commits@phoenix.apache.org by ja...@apache.org on 2015/02/04 21:34:36 UTC

[1/4] phoenix git commit: PHOENIX-514 Support functional indexes (Thomas D'Silva)

Repository: phoenix
Updated Branches:
  refs/heads/master d6e7846f4 -> 8c340f5a6
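
For context, a minimal JDBC sketch of what this change set enables (PHOENIX-514, indexes keyed on expressions rather than plain columns). The connection URL, table, and expressions below are hypothetical illustrations, not taken from the patch.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class FunctionalIndexSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical local Phoenix connection; adjust the quorum as needed.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE IF NOT EXISTS T (K VARCHAR PRIMARY KEY, V1 VARCHAR, V2 VARCHAR)");
            // The index key is an expression over data columns, not a column itself.
            stmt.execute("CREATE INDEX IDX_UPPER ON T (UPPER(V1) || '_' || UPPER(V2)) INCLUDE (V2)");
            stmt.execute("UPSERT INTO T VALUES ('a', 'foo', 'bar')");
            conn.commit();
            // A query whose WHERE clause matches the indexed expression can be served by IDX_UPPER.
            try (ResultSet rs = stmt.executeQuery(
                    "SELECT K FROM T WHERE UPPER(V1) || '_' || UPPER(V2) = 'FOO_BAR'")) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
}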


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
index 91e4f36..964ac39 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/SaltingUtil.java
@@ -38,7 +38,7 @@ public class SaltingUtil {
     public static final String SALTING_COLUMN_NAME = "_SALT";
     public static final String SALTED_ROW_KEY_NAME = "_SALTED_KEY";
     public static final PColumnImpl SALTING_COLUMN = new PColumnImpl(
-            PNameFactory.newName(SALTING_COLUMN_NAME), null, PBinary.INSTANCE, 1, 0, false, 0, SortOrder.getDefault(), 0, null, false);
+            PNameFactory.newName(SALTING_COLUMN_NAME), null, PBinary.INSTANCE, 1, 0, false, 0, SortOrder.getDefault(), 0, null, false, null);
     public static final RowKeySchema VAR_BINARY_SALTED_SCHEMA = new RowKeySchemaBuilder(2)
         .addField(SALTING_COLUMN, false, SortOrder.getDefault())
         .addField(SchemaUtil.VAR_BINARY_DATUM, false, SortOrder.getDefault()).build();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/ValueGetterTuple.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/ValueGetterTuple.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/ValueGetterTuple.java
new file mode 100644
index 0000000..6fc480e
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/tuple/ValueGetterTuple.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.schema.tuple;
+
+import static org.apache.phoenix.hbase.index.util.ImmutableBytesPtr.copyBytesIfNecessary;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValue.Type;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.hbase.index.ValueGetter;
+import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
+import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
+
+/**
+ * 
+ * Class used to construct a {@link Tuple} in order to evaluate an {@link Expression}
+ */
+public class ValueGetterTuple extends BaseTuple {
+	private ValueGetter valueGetter;
+    
+    public ValueGetterTuple(ValueGetter valueGetter) {
+        this.valueGetter = valueGetter;
+    }
+    
+    public ValueGetterTuple() {
+    }
+    
+    @Override
+    public void getKey(ImmutableBytesWritable ptr) {
+        ptr.set(valueGetter.getRowKey());
+    }
+
+    @Override
+    public boolean isImmutable() {
+        return true;
+    }
+
+    @Override
+    public KeyValue getValue(byte[] family, byte[] qualifier) {
+    	ImmutableBytesPtr value = null;
+        try {
+            value = valueGetter.getLatestValue(new ColumnReference(family, qualifier));
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    	return new KeyValue(valueGetter.getRowKey(), family, qualifier, HConstants.LATEST_TIMESTAMP, Type.Put, value!=null? copyBytesIfNecessary(value) : null);
+    }
+
+    @Override
+    public String toString() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public int size() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public KeyValue getValue(int index) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean getValue(byte[] family, byte[] qualifier,
+            ImmutableBytesWritable ptr) {
+        KeyValue kv = getValue(family, qualifier);
+        if (kv == null)
+            return false;
+        ptr.set(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
+        return true;
+    }
+
+}
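
A minimal sketch (not part of this commit) of how the new class above is intended to be used: wrap a ValueGetter so that an index Expression can be evaluated against the latest values of a data row. The single-valued ValueGetter below is hypothetical.

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.hbase.index.ValueGetter;
import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.tuple.ValueGetterTuple;

public class ValueGetterTupleSketch {
    // Evaluates expr against a hypothetical ValueGetter that serves one latest cell value
    // for any referenced column, and returns the evaluated bytes, or null.
    static byte[] evaluateAgainstLatest(Expression expr, final byte[] rowKey, final byte[] latestValue) {
        ValueGetter getter = new ValueGetter() {
            @Override
            public ImmutableBytesPtr getLatestValue(ColumnReference ref) {
                return new ImmutableBytesPtr(latestValue);
            }
            @Override
            public byte[] getRowKey() {
                return rowKey;
            }
        };
        Tuple tuple = new ValueGetterTuple(getter);
        ImmutableBytesWritable ptr = new ImmutableBytesWritable();
        // Expression.evaluate(Tuple, ImmutableBytesWritable) fills ptr with the result bytes.
        return expr.evaluate(tuple, ptr) ? ptr.copyBytes() : null;
    }
}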

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index 39e13bf..8dd4f4d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -209,10 +209,10 @@ public class IndexUtil {
     }
 
     public static List<Mutation> generateIndexData(final PTable table, PTable index,
-            List<Mutation> dataMutations, ImmutableBytesWritable ptr, final KeyValueBuilder kvBuilder)
+            List<Mutation> dataMutations, ImmutableBytesWritable ptr, final KeyValueBuilder kvBuilder, PhoenixConnection connection)
             throws SQLException {
         try {
-            IndexMaintainer maintainer = index.getIndexMaintainer(table);
+            IndexMaintainer maintainer = index.getIndexMaintainer(table, connection);
             List<Mutation> indexMutations = Lists.newArrayListWithExpectedSize(dataMutations.size());
            for (final Mutation dataMutation : dataMutations) {
                 long ts = MetaDataUtil.getClientTimeStamp(dataMutation);
@@ -227,6 +227,11 @@ public class IndexUtil {
                     // TODO: is this more efficient than looking in our mutation map
                     // using the key plus finding the PColumn?
                     ValueGetter valueGetter = new ValueGetter() {
+                    	
+                    	@Override
+                        public byte[] getRowKey() {
+                    		return dataMutation.getRow();
+                    	}
         
                         @Override
                         public ImmutableBytesPtr getLatestValue(ColumnReference ref) {
@@ -267,6 +272,10 @@ public class IndexUtil {
         return column.getName().getString().startsWith(INDEX_COLUMN_NAME_SEP);
     }
     
+    public static boolean isIndexColumn(PColumn column) {
+        return column.getName().getString().contains(INDEX_COLUMN_NAME_SEP);
+    }
+    
     public static boolean getViewConstantValue(PColumn column, ImmutableBytesWritable ptr) {
         byte[] value = column.getViewConstant();
         if (value != null) {
@@ -441,7 +450,7 @@ public class IndexUtil {
         PhoenixStatement statement = new PhoenixStatement(conn);
         TableRef indexTableRef = new TableRef(index) {
             @Override
-            public String getColumnDisplayName(ColumnRef ref, boolean cfCaseSensitive, boolean cqCaseSensitive) {
+            public String getColumnDisplayName(ColumnRef ref, boolean schemaNameCaseSensitive, boolean colNameCaseSensitive) {
                 return '"' + ref.getColumn().getName().getString() + '"';
             }
         };

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java b/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
index b91d8ca..06d21c2 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/expression/ColumnExpressionTest.java
@@ -41,7 +41,7 @@ public class ColumnExpressionTest {
         int maxLen = 30;
         int scale = 5;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, maxLen, scale,
-                true, 20, SortOrder.getDefault(), 0, null, false);
+                true, 20, SortOrder.getDefault(), 0, null, false, null);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -61,7 +61,7 @@ public class ColumnExpressionTest {
     public void testSerializationWithNullScale() throws Exception {
         int maxLen = 30;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PBinary.INSTANCE, maxLen, null,
-                true, 20, SortOrder.getDefault(), 0, null, false);
+                true, 20, SortOrder.getDefault(), 0, null, false, null);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -81,7 +81,7 @@ public class ColumnExpressionTest {
     public void testSerializationWithNullMaxLength() throws Exception {
         int scale = 5;
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PVarchar.INSTANCE, null, scale,
-                true, 20, SortOrder.getDefault(), 0, null, false);
+                true, 20, SortOrder.getDefault(), 0, null, false, null);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);
@@ -100,7 +100,7 @@ public class ColumnExpressionTest {
     @Test
     public void testSerializationWithNullScaleAndMaxLength() throws Exception {
         PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, null, null, true,
-                20, SortOrder.getDefault(), 0, null, false);
+                20, SortOrder.getDefault(), 0, null, false, null);
         ColumnExpression colExp = new KeyValueColumnExpression(column);
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
         DataOutputStream dOut = new DataOutputStream(baos);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/test/java/org/apache/phoenix/index/IndexMaintainerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/index/IndexMaintainerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/index/IndexMaintainerTest.java
index 183f699..592ac7c 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/index/IndexMaintainerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/index/IndexMaintainerTest.java
@@ -69,13 +69,18 @@ public class IndexMaintainerTest  extends BaseConnectionlessQueryTest {
         testIndexRowKeyBuilding(DEFAULT_SCHEMA_NAME, DEFAULT_TABLE_NAME, dataColumns, pk, indexColumns, values, "", dataProps, indexProps);
     }
 
-    private static ValueGetter newValueGetter(final Map<ColumnReference, byte[]> valueMap) {
+    private static ValueGetter newValueGetter(final byte[] row, final Map<ColumnReference, byte[]> valueMap) {
         return new ValueGetter() {
 
             @Override
             public ImmutableBytesPtr getLatestValue(ColumnReference ref) {
                 return new ImmutableBytesPtr(valueMap.get(ref));
             }
+
+			@Override
+			public byte[] getRowKey() {
+				return row;
+			}
             
         };
     }
@@ -98,7 +103,7 @@ public class IndexMaintainerTest  extends BaseConnectionlessQueryTest {
             PTable table = pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName));
             PTable index = pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(),fullIndexName));
             ImmutableBytesWritable ptr = new ImmutableBytesWritable();
-            table.getIndexMaintainers(ptr);
+            table.getIndexMaintainers(ptr, pconn);
             List<IndexMaintainer> c1 = IndexMaintainer.deserialize(ptr, builder);
             assertEquals(1,c1.size());
             IndexMaintainer im1 = c1.get(0);
@@ -116,13 +121,14 @@ public class IndexMaintainerTest  extends BaseConnectionlessQueryTest {
             	Iterator<Pair<byte[],List<KeyValue>>> iterator = PhoenixRuntime.getUncommittedDataIterator(conn);
             List<KeyValue> dataKeyValues = iterator.next().getSecond();
             Map<ColumnReference,byte[]> valueMap = Maps.newHashMapWithExpectedSize(dataKeyValues.size());
-            ImmutableBytesWritable rowKeyPtr = new ImmutableBytesWritable(dataKeyValues.get(0).getRow());
+            byte[] row = dataKeyValues.get(0).getRow();
+			ImmutableBytesWritable rowKeyPtr = new ImmutableBytesWritable(row);
             Put dataMutation = new Put(rowKeyPtr.copyBytes());
             for (KeyValue kv : dataKeyValues) {
                 valueMap.put(new ColumnReference(kv.getFamily(),kv.getQualifier()), kv.getValue());
                 dataMutation.add(kv);
             }
-            ValueGetter valueGetter = newValueGetter(valueMap);
+            ValueGetter valueGetter = newValueGetter(row, valueMap);
             
             List<Mutation> indexMutations =
                     IndexTestUtil.generateIndexData(index, table, dataMutation, ptr, builder);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java b/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
index f02738d..5624b51 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/iterate/AggregateResultScannerTest.java
@@ -124,6 +124,11 @@ public class AggregateResultScannerTest extends BaseConnectionlessQueryTest {
             public boolean isViewReferenced() {
                 return false;
             }
+            
+            @Override
+            public String getExpressionStr() {
+                return null;
+            }
         })), null);
         aggregationManager.setAggregators(new ClientAggregators(Collections.<SingleAggregateFunction>singletonList(func), 1));
         ResultIterators iterators = new ResultIterators() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
index 6a2ce26..f81c3a9 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/query/BaseTest.java
@@ -376,39 +376,45 @@ public abstract class BaseTest {
                 "    kv bigint)\n");
         builder.put(INDEX_DATA_TABLE, "create table " + INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + INDEX_DATA_TABLE + "(" +
                 "   varchar_pk VARCHAR NOT NULL, " +
-                "   char_pk CHAR(5) NOT NULL, " +
+                "   char_pk CHAR(6) NOT NULL, " +
                 "   int_pk INTEGER NOT NULL, "+ 
                 "   long_pk BIGINT NOT NULL, " +
                 "   decimal_pk DECIMAL(31, 10) NOT NULL, " +
+                "   date_pk DATE NOT NULL, " +
                 "   a.varchar_col1 VARCHAR, " +
-                "   a.char_col1 CHAR(5), " +
+                "   a.char_col1 CHAR(10), " +
                 "   a.int_col1 INTEGER, " +
                 "   a.long_col1 BIGINT, " +
                 "   a.decimal_col1 DECIMAL(31, 10), " +
+                "   a.date1 DATE, " +
                 "   b.varchar_col2 VARCHAR, " +
-                "   b.char_col2 CHAR(5), " +
+                "   b.char_col2 CHAR(10), " +
                 "   b.int_col2 INTEGER, " +
                 "   b.long_col2 BIGINT, " +
-                "   b.decimal_col2 DECIMAL(31, 10) " +
-                "   CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk)) " +
+                "   b.decimal_col2 DECIMAL(31, 10), " +
+                "   b.date2 DATE " +
+                "   CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk, date_pk)) " +
                 "IMMUTABLE_ROWS=true");
         builder.put(MUTABLE_INDEX_DATA_TABLE, "create table " + INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + MUTABLE_INDEX_DATA_TABLE + "(" +
                 "   varchar_pk VARCHAR NOT NULL, " +
-                "   char_pk CHAR(5) NOT NULL, " +
+                "   char_pk CHAR(6) NOT NULL, " +
                 "   int_pk INTEGER NOT NULL, "+ 
                 "   long_pk BIGINT NOT NULL, " +
                 "   decimal_pk DECIMAL(31, 10) NOT NULL, " +
+                "   date_pk DATE NOT NULL, " +
                 "   a.varchar_col1 VARCHAR, " +
-                "   a.char_col1 CHAR(5), " +
+                "   a.char_col1 CHAR(10), " +
                 "   a.int_col1 INTEGER, " +
                 "   a.long_col1 BIGINT, " +
                 "   a.decimal_col1 DECIMAL(31, 10), " +
+                "   a.date1 DATE, " +
                 "   b.varchar_col2 VARCHAR, " +
-                "   b.char_col2 CHAR(5), " +
+                "   b.char_col2 CHAR(10), " +
                 "   b.int_col2 INTEGER, " +
                 "   b.long_col2 BIGINT, " +
-                "   b.decimal_col2 DECIMAL(31, 10) " +
-                "   CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk)) "
+                "   b.decimal_col2 DECIMAL(31, 10), " +
+                "   b.date2 DATE " +
+                "   CONSTRAINT pk PRIMARY KEY (varchar_pk, char_pk, int_pk, long_pk DESC, decimal_pk, date_pk)) "
                 );
         builder.put("SumDoubleTest","create table SumDoubleTest" +
                 "   (id varchar not null primary key, d DOUBLE, f FLOAT, ud UNSIGNED_DOUBLE, uf UNSIGNED_FLOAT, i integer, de decimal)");

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-protocol/src/main/PTable.proto
----------------------------------------------------------------------
diff --git a/phoenix-protocol/src/main/PTable.proto b/phoenix-protocol/src/main/PTable.proto
index 89ceea3..348631f 100644
--- a/phoenix-protocol/src/main/PTable.proto
+++ b/phoenix-protocol/src/main/PTable.proto
@@ -46,6 +46,7 @@ message PColumn {
   optional int32 arraySize = 9;
   optional bytes viewConstant = 10;
   optional bool viewReferenced = 11;
+  optional string expression = 12;
 }
 
 message PTableStats {


[3/4] phoenix git commit: PHOENIX-514 Support functional indexes (Thomas D'Silva)

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostIndexDDLCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostIndexDDLCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostIndexDDLCompiler.java
index 2ea42ce..c8cf28e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/PostIndexDDLCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/PostIndexDDLCompiler.java
@@ -22,7 +22,6 @@ import java.util.List;
 
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixStatement;
-import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
 import org.apache.phoenix.schema.PTable;
@@ -56,35 +55,37 @@ public class PostIndexDDLCompiler {
         //   that would allow the user to easily monitor the process of index creation.
         StringBuilder indexColumns = new StringBuilder();
         StringBuilder dataColumns = new StringBuilder();
-        List<PColumn> dataPKColumns = dataTableRef.getTable().getPKColumns();
-        PTable dataTable = dataTableRef.getTable();
-        int nPKColumns = dataPKColumns.size();
-        boolean isSalted = dataTable.getBucketNum() != null;
-        boolean isMultiTenant = connection.getTenantId() != null && dataTable.isMultiTenant();
-        int posOffset = (isSalted ? 1 : 0) + (isMultiTenant ? 1 : 0);
-        for (int i = posOffset; i < nPKColumns; i++) {
-            PColumn col = dataPKColumns.get(i);
-            if (col.getViewConstant() == null) {
-                String indexColName = IndexUtil.getIndexColumnName(col);
-                dataColumns.append('"').append(col.getName()).append("\",");
-                indexColumns.append('"').append(indexColName).append("\",");
-            }
+        
+        // Add the pk index columns
+        List<PColumn> indexPKColumns = indexTable.getPKColumns();
+        int nIndexPKColumns = indexTable.getPKColumns().size();
+        boolean isSalted = indexTable.getBucketNum() != null;
+        boolean isMultiTenant = connection.getTenantId() != null && indexTable.isMultiTenant();
+        boolean isViewIndex = indexTable.getViewIndexId()!=null;
+        int posOffset = (isSalted ? 1 : 0) + (isMultiTenant ? 1 : 0) + (isViewIndex ? 1 : 0);
+        for (int i = posOffset; i < nIndexPKColumns; i++) {
+            PColumn col = indexPKColumns.get(i);
+            String indexColName = col.getName().getString();
+            dataColumns.append(col.getExpressionStr()).append(",");
+            indexColumns.append('"').append(indexColName).append("\",");
         }
-        for (PColumnFamily family : dataTableRef.getTable().getColumnFamilies()) {
+        
+        // Add the covered columns
+        for (PColumnFamily family : indexTable.getColumnFamilies()) {
             for (PColumn col : family.getColumns()) {
                 if (col.getViewConstant() == null) {
-                    String indexColName = IndexUtil.getIndexColumnName(col);
-                    try {
-                        indexTable.getColumn(indexColName);
-                        dataColumns.append('"').append(col.getFamilyName()).append("\".");
-                        dataColumns.append('"').append(col.getName()).append("\",");
-                        indexColumns.append('"').append(indexColName).append("\",");
-                    } catch (ColumnNotFoundException e) {
-                        // Catch and ignore - means that this data column is not in the index
+                    String indexColName = col.getName().getString();
+                    String dataFamilyName = IndexUtil.getDataColumnFamilyName(indexColName);
+                    String dataColumnName = IndexUtil.getDataColumnName(indexColName);
+                    if (!dataFamilyName.equals("")) {
+                        dataColumns.append('"').append(dataFamilyName).append("\".");
                     }
+                    dataColumns.append('"').append(dataColumnName).append("\",");
+                    indexColumns.append('"').append(indexColName).append("\",");
                 }
             }
         }
+
         dataColumns.setLength(dataColumns.length()-1);
         indexColumns.setLength(indexColumns.length()-1);
         String schemaName = dataTableRef.getTable().getSchemaName().getString();
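
For reference, a rough sketch (not from this commit) of the name mapping the rewritten loop above relies on: a covered index column name encodes the data column family and column name, which IndexUtil can split back apart, while a PK expression column carries its source expression in getExpressionStr(). The family/column names below, and the String overload of getIndexColumnName, are assumptions for illustration.

import org.apache.phoenix.util.IndexUtil;

public class IndexColumnNameSketch {
    public static void main(String[] args) {
        // Hypothetical covered column: data family "A", data column "INT_COL1".
        String indexColName = IndexUtil.getIndexColumnName("A", "INT_COL1");
        String dataFamilyName = IndexUtil.getDataColumnFamilyName(indexColName);
        String dataColumnName = IndexUtil.getDataColumnName(indexColName);
        // PostIndexDDLCompiler quotes these back as "A"."INT_COL1" when rebuilding the SELECT list.
        System.out.println("family=" + dataFamilyName + ", column=" + dataColumnName);
    }
}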

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index d534d50..2ac075e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -603,7 +603,7 @@ public class UpsertCompiler {
                         @Override
                         public MutationState execute() throws SQLException {
                             ImmutableBytesWritable ptr = context.getTempPtr();
-                            tableRef.getTable().getIndexMaintainers(ptr);
+                            tableRef.getTable().getIndexMaintainers(ptr, context.getConnection());
                             ServerCache cache = null;
                             try {
                                 if (ptr.getLength() > 0) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
index 1360178..406b567 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
@@ -49,12 +49,12 @@ import org.apache.phoenix.parse.SubqueryParseNode;
 import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.ColumnRef;
-import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.TypeMismatchException;
+import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.ScanUtil;
 import org.apache.phoenix.util.SchemaUtil;
@@ -173,7 +173,7 @@ public class WhereCompiler {
                 context.addWhereCoditionColumn(ref.getColumn().getFamilyName().getBytes(), ref.getColumn().getName()
                         .getBytes());
             }
-            return ref.newColumnExpression();
+            return ref.newColumnExpression(node.isTableNameCaseSensitive(), node.isCaseSensitive());
         }
 
         @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index 9d055c3..ce81e1f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.coprocessor;
 
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT_BYTES;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_DEF_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME_INDEX;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_SIZE_BYTES;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DATA_TABLE_NAME_BYTES;
@@ -60,7 +61,9 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Coprocessor;
@@ -105,25 +108,23 @@ import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest;
+import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
+import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.metrics.Metrics;
 import org.apache.phoenix.protobuf.ProtobufUtil;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.AmbiguousColumnException;
-import org.apache.phoenix.schema.types.PBinary;
 import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
-import org.apache.phoenix.schema.types.PBoolean;
-import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
 import org.apache.phoenix.schema.PColumnImpl;
-import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.PIndexState;
-import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
@@ -132,18 +133,23 @@ import org.apache.phoenix.schema.PTable.LinkType;
 import org.apache.phoenix.schema.PTable.ViewType;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.schema.types.PVarbinary;
-import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.stats.PTableStats;
 import org.apache.phoenix.schema.stats.StatisticsUtil;
 import org.apache.phoenix.schema.tuple.ResultTuple;
+import org.apache.phoenix.schema.types.PBinary;
+import org.apache.phoenix.schema.types.PBoolean;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.schema.types.PInteger;
+import org.apache.phoenix.schema.types.PLong;
+import org.apache.phoenix.schema.types.PVarbinary;
+import org.apache.phoenix.schema.types.PVarchar;
 import org.apache.phoenix.trace.util.Tracing;
 import org.apache.phoenix.util.ByteUtil;
-import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.KeyValueUtil;
 import org.apache.phoenix.util.MetaDataUtil;
+import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 import org.apache.phoenix.util.ServerUtil;
 import org.slf4j.Logger;
@@ -242,6 +248,7 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
     private static final KeyValue ARRAY_SIZE_KV = KeyValue.createFirstOnRow(ByteUtil.EMPTY_BYTE_ARRAY, TABLE_FAMILY_BYTES, ARRAY_SIZE_BYTES);
     private static final KeyValue VIEW_CONSTANT_KV = KeyValue.createFirstOnRow(ByteUtil.EMPTY_BYTE_ARRAY, TABLE_FAMILY_BYTES, VIEW_CONSTANT_BYTES);
     private static final KeyValue IS_VIEW_REFERENCED_KV = KeyValue.createFirstOnRow(ByteUtil.EMPTY_BYTE_ARRAY, TABLE_FAMILY_BYTES, IS_VIEW_REFERENCED_BYTES);
+    private static final KeyValue COLUMN_DEF_KV = KeyValue.createFirstOnRow(ByteUtil.EMPTY_BYTE_ARRAY, TABLE_FAMILY_BYTES, COLUMN_DEF_BYTES);
     private static final List<KeyValue> COLUMN_KV_COLUMNS = Arrays.<KeyValue>asList(
             DECIMAL_DIGITS_KV,
             COLUMN_SIZE_KV,
@@ -252,7 +259,8 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
             DATA_TABLE_NAME_KV, // included in both column and table row for metadata APIs
             ARRAY_SIZE_KV,
             VIEW_CONSTANT_KV,
-            IS_VIEW_REFERENCED_KV
+            IS_VIEW_REFERENCED_KV,
+            COLUMN_DEF_KV
             );
     static {
         Collections.sort(COLUMN_KV_COLUMNS, KeyValue.COMPARATOR);
@@ -266,7 +274,8 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
     private static final int ARRAY_SIZE_INDEX = COLUMN_KV_COLUMNS.indexOf(ARRAY_SIZE_KV);
     private static final int VIEW_CONSTANT_INDEX = COLUMN_KV_COLUMNS.indexOf(VIEW_CONSTANT_KV);
     private static final int IS_VIEW_REFERENCED_INDEX = COLUMN_KV_COLUMNS.indexOf(IS_VIEW_REFERENCED_KV);
-
+    private static final int COLUMN_DEF_INDEX = COLUMN_KV_COLUMNS.indexOf(COLUMN_DEF_KV);
+    
     private static final int LINK_TYPE_INDEX = 0;
 
     private static PName newPName(byte[] keyBuffer, int keyOffset, int keyLength) {
@@ -460,7 +469,9 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
         byte[] viewConstant = viewConstantKv == null ? null : viewConstantKv.getValue();
         Cell isViewReferencedKv = colKeyValues[IS_VIEW_REFERENCED_INDEX];
         boolean isViewReferenced = isViewReferencedKv != null && Boolean.TRUE.equals(PBoolean.INSTANCE.toObject(isViewReferencedKv.getValueArray(), isViewReferencedKv.getValueOffset(), isViewReferencedKv.getValueLength()));
-        PColumn column = new PColumnImpl(colName, famName, dataType, maxLength, scale, isNullable, position-1, sortOrder, arraySize, viewConstant, isViewReferenced);
+        Cell columnDefKv = colKeyValues[COLUMN_DEF_INDEX];
+        String expressionStr = columnDefKv==null ? null : (String)PVarchar.INSTANCE.toObject(columnDefKv.getValueArray(), columnDefKv.getValueOffset(), columnDefKv.getValueLength());
+        PColumn column = new PColumnImpl(colName, famName, dataType, maxLength, scale, isNullable, position-1, sortOrder, arraySize, viewConstant, isViewReferenced, expressionStr);
         columns.add(column);
     }
 
@@ -1399,15 +1410,17 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
                                     // column, get lock and drop the index. If found as covered
                                     // column, delete from index (do this client side?).
                                     // In either case, invalidate index if the column is in it
+                                    PhoenixConnection connection = QueryUtil.getConnection(env.getConfiguration()).unwrap(PhoenixConnection.class);
                                     for (PTable index : table.getIndexes()) {
                                         try {
-                                            String indexColumnName = IndexUtil.getIndexColumnName(columnToDelete);
-                                            PColumn indexColumn = index.getColumn(indexColumnName);
+                                            IndexMaintainer indexMaintainer = index.getIndexMaintainer(table, connection);
+                                            // get the columns required to create the index 
+                                            Set<ColumnReference> indexColumns = indexMaintainer.getIndexedColumns();
                                             byte[] indexKey =
                                                     SchemaUtil.getTableKey(tenantId, index
                                                             .getSchemaName().getBytes(), index.getTableName().getBytes());
-                                            // If index contains the column in it's PK, then drop it
-                                            if (SchemaUtil.isPKColumn(indexColumn)) {
+                                            // If index requires this column, then drop it
+                                            if (indexColumns.contains(new ColumnReference(columnToDelete.getFamilyName().getBytes(), columnToDelete.getName().getBytes()))) {
                                                 // Since we're dropping the index, lock it to ensure
                                                 // that a change in index state doesn't
                                                 // occur while we're dropping it.
@@ -1439,6 +1452,7 @@ public class MetaDataEndpointImpl extends MetaDataProtocol implements Coprocesso
                                     return new MetaDataMutationResult(
                                             MutationCode.COLUMN_NOT_FOUND, EnvironmentEdgeManager
                                                     .currentTimeMillis(), table, columnToDelete);
+                                } catch (ClassNotFoundException e1) {
                                 }
                             }
                         }
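
A small end-to-end sketch (not part of this commit) of the behaviour the block above implements on the server side: once an index is keyed on an expression, dropping any data column that expression references causes the whole index to be dropped. Table and column names are hypothetical.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DropIndexedColumnSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE IF NOT EXISTS T (K VARCHAR PRIMARY KEY, V1 VARCHAR, V2 VARCHAR)");
            stmt.execute("CREATE INDEX IDX_LEN ON T (LENGTH(V2))");
            // getIndexedColumns() on the server resolves LENGTH(V2) back to column V2,
            // so dropping V2 is expected to drop IDX_LEN as well.
            stmt.execute("ALTER TABLE T DROP COLUMN V2");
            try (ResultSet rs = conn.getMetaData().getTables(null, null, "IDX_LEN", null)) {
                System.out.println("index still present: " + rs.next());
            }
        }
    }
}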

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index 5bbd5d3..a3b2faa 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@ -66,6 +66,7 @@ import org.apache.phoenix.expression.aggregator.ServerAggregators;
 import org.apache.phoenix.hbase.index.ValueGetter;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder;
+import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.index.PhoenixIndexCodec;
@@ -270,7 +271,7 @@ public class UngroupedAggregateRegionObserver extends BaseScannerRegionObserver{
                             for (IndexMaintainer maintainer : indexMaintainers) {
                                 if (!results.isEmpty()) {
                                     result.getKey(ptr);
-                                    ValueGetter valueGetter = maintainer.createGetterFromKeyValues(results);
+                                    ValueGetter valueGetter = maintainer.createGetterFromKeyValues(ImmutableBytesPtr.copyBytesIfNecessary(ptr),results);
                                     Put put = maintainer.buildUpdateMutation(kvBuilder, valueGetter, ptr, ts, c.getEnvironment().getRegion().getStartKey(), c.getEnvironment().getRegion().getEndKey());
                                     indexMutations.add(put);
                                 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/PTableProtos.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/PTableProtos.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/PTableProtos.java
index 3ff3dd6..7d389ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/PTableProtos.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/PTableProtos.java
@@ -234,6 +234,21 @@ public final class PTableProtos {
      * <code>optional bool viewReferenced = 11;</code>
      */
     boolean getViewReferenced();
+
+    // optional string expression = 12;
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    boolean hasExpression();
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    java.lang.String getExpression();
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    com.google.protobuf.ByteString
+        getExpressionBytes();
   }
   /**
    * Protobuf type {@code PColumn}
@@ -341,6 +356,11 @@ public final class PTableProtos {
               viewReferenced_ = input.readBool();
               break;
             }
+            case 98: {
+              bitField0_ |= 0x00000800;
+              expression_ = input.readBytes();
+              break;
+            }
           }
         }
       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
@@ -584,6 +604,49 @@ public final class PTableProtos {
       return viewReferenced_;
     }
 
+    // optional string expression = 12;
+    public static final int EXPRESSION_FIELD_NUMBER = 12;
+    private java.lang.Object expression_;
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    public boolean hasExpression() {
+      return ((bitField0_ & 0x00000800) == 0x00000800);
+    }
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    public java.lang.String getExpression() {
+      java.lang.Object ref = expression_;
+      if (ref instanceof java.lang.String) {
+        return (java.lang.String) ref;
+      } else {
+        com.google.protobuf.ByteString bs = 
+            (com.google.protobuf.ByteString) ref;
+        java.lang.String s = bs.toStringUtf8();
+        if (bs.isValidUtf8()) {
+          expression_ = s;
+        }
+        return s;
+      }
+    }
+    /**
+     * <code>optional string expression = 12;</code>
+     */
+    public com.google.protobuf.ByteString
+        getExpressionBytes() {
+      java.lang.Object ref = expression_;
+      if (ref instanceof java.lang.String) {
+        com.google.protobuf.ByteString b = 
+            com.google.protobuf.ByteString.copyFromUtf8(
+                (java.lang.String) ref);
+        expression_ = b;
+        return b;
+      } else {
+        return (com.google.protobuf.ByteString) ref;
+      }
+    }
+
     private void initFields() {
       columnNameBytes_ = com.google.protobuf.ByteString.EMPTY;
       familyNameBytes_ = com.google.protobuf.ByteString.EMPTY;
@@ -596,6 +659,7 @@ public final class PTableProtos {
       arraySize_ = 0;
       viewConstant_ = com.google.protobuf.ByteString.EMPTY;
       viewReferenced_ = false;
+      expression_ = "";
     }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
@@ -662,6 +726,9 @@ public final class PTableProtos {
       if (((bitField0_ & 0x00000400) == 0x00000400)) {
         output.writeBool(11, viewReferenced_);
       }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        output.writeBytes(12, getExpressionBytes());
+      }
       getUnknownFields().writeTo(output);
     }
 
@@ -715,6 +782,10 @@ public final class PTableProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeBoolSize(11, viewReferenced_);
       }
+      if (((bitField0_ & 0x00000800) == 0x00000800)) {
+        size += com.google.protobuf.CodedOutputStream
+          .computeBytesSize(12, getExpressionBytes());
+      }
       size += getUnknownFields().getSerializedSize();
       memoizedSerializedSize = size;
       return size;
@@ -793,6 +864,11 @@ public final class PTableProtos {
         result = result && (getViewReferenced()
             == other.getViewReferenced());
       }
+      result = result && (hasExpression() == other.hasExpression());
+      if (hasExpression()) {
+        result = result && getExpression()
+            .equals(other.getExpression());
+      }
       result = result &&
           getUnknownFields().equals(other.getUnknownFields());
       return result;
@@ -850,6 +926,10 @@ public final class PTableProtos {
         hash = (37 * hash) + VIEWREFERENCED_FIELD_NUMBER;
         hash = (53 * hash) + hashBoolean(getViewReferenced());
       }
+      if (hasExpression()) {
+        hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
+        hash = (53 * hash) + getExpression().hashCode();
+      }
       hash = (29 * hash) + getUnknownFields().hashCode();
       memoizedHashCode = hash;
       return hash;
@@ -981,6 +1061,8 @@ public final class PTableProtos {
         bitField0_ = (bitField0_ & ~0x00000200);
         viewReferenced_ = false;
         bitField0_ = (bitField0_ & ~0x00000400);
+        expression_ = "";
+        bitField0_ = (bitField0_ & ~0x00000800);
         return this;
       }
 
@@ -1053,6 +1135,10 @@ public final class PTableProtos {
           to_bitField0_ |= 0x00000400;
         }
         result.viewReferenced_ = viewReferenced_;
+        if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
+          to_bitField0_ |= 0x00000800;
+        }
+        result.expression_ = expression_;
         result.bitField0_ = to_bitField0_;
         onBuilt();
         return result;
@@ -1104,6 +1190,11 @@ public final class PTableProtos {
         if (other.hasViewReferenced()) {
           setViewReferenced(other.getViewReferenced());
         }
+        if (other.hasExpression()) {
+          bitField0_ |= 0x00000800;
+          expression_ = other.expression_;
+          onChanged();
+        }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
@@ -1564,6 +1655,80 @@ public final class PTableProtos {
         return this;
       }
 
+      // optional string expression = 12;
+      private java.lang.Object expression_ = "";
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public boolean hasExpression() {
+        return ((bitField0_ & 0x00000800) == 0x00000800);
+      }
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public java.lang.String getExpression() {
+        java.lang.Object ref = expression_;
+        if (!(ref instanceof java.lang.String)) {
+          java.lang.String s = ((com.google.protobuf.ByteString) ref)
+              .toStringUtf8();
+          expression_ = s;
+          return s;
+        } else {
+          return (java.lang.String) ref;
+        }
+      }
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public com.google.protobuf.ByteString
+          getExpressionBytes() {
+        java.lang.Object ref = expression_;
+        if (ref instanceof String) {
+          com.google.protobuf.ByteString b = 
+              com.google.protobuf.ByteString.copyFromUtf8(
+                  (java.lang.String) ref);
+          expression_ = b;
+          return b;
+        } else {
+          return (com.google.protobuf.ByteString) ref;
+        }
+      }
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public Builder setExpression(
+          java.lang.String value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000800;
+        expression_ = value;
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public Builder clearExpression() {
+        bitField0_ = (bitField0_ & ~0x00000800);
+        expression_ = getDefaultInstance().getExpression();
+        onChanged();
+        return this;
+      }
+      /**
+       * <code>optional string expression = 12;</code>
+       */
+      public Builder setExpressionBytes(
+          com.google.protobuf.ByteString value) {
+        if (value == null) {
+    throw new NullPointerException();
+  }
+  bitField0_ |= 0x00000800;
+        expression_ = value;
+        onChanged();
+        return this;
+      }
+
       // @@protoc_insertion_point(builder_scope:PColumn)
     }
 
@@ -6294,35 +6459,35 @@ public final class PTableProtos {
       descriptor;
   static {
     java.lang.String[] descriptorData = {
-      "\n\014PTable.proto\032\021PGuidePosts.proto\"\347\001\n\007PC" +
+      "\n\014PTable.proto\032\021PGuidePosts.proto\"\373\001\n\007PC" +
       "olumn\022\027\n\017columnNameBytes\030\001 \002(\014\022\027\n\017family" +
       "NameBytes\030\002 \001(\014\022\020\n\010dataType\030\003 \002(\t\022\021\n\tmax" +
       "Length\030\004 \001(\005\022\r\n\005scale\030\005 \001(\005\022\020\n\010nullable\030" +
       "\006 \002(\010\022\020\n\010position\030\007 \002(\005\022\021\n\tsortOrder\030\010 \002" +
       "(\005\022\021\n\tarraySize\030\t \001(\005\022\024\n\014viewConstant\030\n " +
-      "\001(\014\022\026\n\016viewReferenced\030\013 \001(\010\"\232\001\n\013PTableSt" +
-      "ats\022\013\n\003key\030\001 \002(\014\022\016\n\006values\030\002 \003(\014\022\033\n\023guid" +
-      "ePostsByteCount\030\003 \001(\003\022\025\n\rkeyBytesCount\030\004" +
-      " \001(\003\022\027\n\017guidePostsCount\030\005 \001(\005\022!\n\013pGuideP",
-      "osts\030\006 \001(\0132\014.PGuidePosts\"\266\004\n\006PTable\022\027\n\017s" +
-      "chemaNameBytes\030\001 \002(\014\022\026\n\016tableNameBytes\030\002" +
-      " \002(\014\022\036\n\ttableType\030\003 \002(\0162\013.PTableType\022\022\n\n" +
-      "indexState\030\004 \001(\t\022\026\n\016sequenceNumber\030\005 \002(\003" +
-      "\022\021\n\ttimeStamp\030\006 \002(\003\022\023\n\013pkNameBytes\030\007 \001(\014" +
-      "\022\021\n\tbucketNum\030\010 \002(\005\022\031\n\007columns\030\t \003(\0132\010.P" +
-      "Column\022\030\n\007indexes\030\n \003(\0132\007.PTable\022\027\n\017isIm" +
-      "mutableRows\030\013 \002(\010\022 \n\nguidePosts\030\014 \003(\0132\014." +
-      "PTableStats\022\032\n\022dataTableNameBytes\030\r \001(\014\022" +
-      "\031\n\021defaultFamilyName\030\016 \001(\014\022\022\n\ndisableWAL",
-      "\030\017 \002(\010\022\023\n\013multiTenant\030\020 \002(\010\022\020\n\010viewType\030" +
-      "\021 \001(\014\022\025\n\rviewStatement\030\022 \001(\014\022\025\n\rphysical" +
-      "Names\030\023 \003(\014\022\020\n\010tenantId\030\024 \001(\014\022\023\n\013viewInd" +
-      "exId\030\025 \001(\005\022\021\n\tindexType\030\026 \001(\014\022\026\n\016statsTi" +
-      "meStamp\030\027 \001(\003\022\022\n\nstoreNulls\030\030 \001(\010*A\n\nPTa" +
-      "bleType\022\n\n\006SYSTEM\020\000\022\010\n\004USER\020\001\022\010\n\004VIEW\020\002\022" +
-      "\t\n\005INDEX\020\003\022\010\n\004JOIN\020\004B@\n(org.apache.phoen" +
-      "ix.coprocessor.generatedB\014PTableProtosH\001" +
-      "\210\001\001\240\001\001"
+      "\001(\014\022\026\n\016viewReferenced\030\013 \001(\010\022\022\n\nexpressio" +
+      "n\030\014 \001(\t\"\232\001\n\013PTableStats\022\013\n\003key\030\001 \002(\014\022\016\n\006" +
+      "values\030\002 \003(\014\022\033\n\023guidePostsByteCount\030\003 \001(" +
+      "\003\022\025\n\rkeyBytesCount\030\004 \001(\003\022\027\n\017guidePostsCo",
+      "unt\030\005 \001(\005\022!\n\013pGuidePosts\030\006 \001(\0132\014.PGuideP" +
+      "osts\"\266\004\n\006PTable\022\027\n\017schemaNameBytes\030\001 \002(\014" +
+      "\022\026\n\016tableNameBytes\030\002 \002(\014\022\036\n\ttableType\030\003 " +
+      "\002(\0162\013.PTableType\022\022\n\nindexState\030\004 \001(\t\022\026\n\016" +
+      "sequenceNumber\030\005 \002(\003\022\021\n\ttimeStamp\030\006 \002(\003\022" +
+      "\023\n\013pkNameBytes\030\007 \001(\014\022\021\n\tbucketNum\030\010 \002(\005\022" +
+      "\031\n\007columns\030\t \003(\0132\010.PColumn\022\030\n\007indexes\030\n " +
+      "\003(\0132\007.PTable\022\027\n\017isImmutableRows\030\013 \002(\010\022 \n" +
+      "\nguidePosts\030\014 \003(\0132\014.PTableStats\022\032\n\022dataT" +
+      "ableNameBytes\030\r \001(\014\022\031\n\021defaultFamilyName",
+      "\030\016 \001(\014\022\022\n\ndisableWAL\030\017 \002(\010\022\023\n\013multiTenan" +
+      "t\030\020 \002(\010\022\020\n\010viewType\030\021 \001(\014\022\025\n\rviewStateme" +
+      "nt\030\022 \001(\014\022\025\n\rphysicalNames\030\023 \003(\014\022\020\n\010tenan" +
+      "tId\030\024 \001(\014\022\023\n\013viewIndexId\030\025 \001(\005\022\021\n\tindexT" +
+      "ype\030\026 \001(\014\022\026\n\016statsTimeStamp\030\027 \001(\003\022\022\n\nsto" +
+      "reNulls\030\030 \001(\010*A\n\nPTableType\022\n\n\006SYSTEM\020\000\022" +
+      "\010\n\004USER\020\001\022\010\n\004VIEW\020\002\022\t\n\005INDEX\020\003\022\010\n\004JOIN\020\004" +
+      "B@\n(org.apache.phoenix.coprocessor.gener" +
+      "atedB\014PTableProtosH\001\210\001\001\240\001\001"
     };
     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -6334,7 +6499,7 @@ public final class PTableProtos {
           internal_static_PColumn_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_PColumn_descriptor,
-              new java.lang.String[] { "ColumnNameBytes", "FamilyNameBytes", "DataType", "MaxLength", "Scale", "Nullable", "Position", "SortOrder", "ArraySize", "ViewConstant", "ViewReferenced", });
+              new java.lang.String[] { "ColumnNameBytes", "FamilyNameBytes", "DataType", "MaxLength", "Scale", "Nullable", "Position", "SortOrder", "ArraySize", "ViewConstant", "ViewReferenced", "Expression", });
           internal_static_PTableStats_descriptor =
             getDescriptor().getMessageTypes().get(1);
           internal_static_PTableStats_fieldAccessorTable = new
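
A tiny sketch (not from this commit) of the generated API added above: the expression string travels as an optional field on the PColumn protobuf message alongside the existing required fields. All field values below are hypothetical.

import com.google.protobuf.ByteString;
import org.apache.phoenix.coprocessor.generated.PTableProtos;

public class PColumnExpressionSketch {
    public static void main(String[] args) {
        PTableProtos.PColumn col = PTableProtos.PColumn.newBuilder()
            .setColumnNameBytes(ByteString.copyFromUtf8("(UPPER(V1))"))  // hypothetical column name
            .setDataType("VARCHAR")
            .setNullable(true)
            .setPosition(1)
            .setSortOrder(2)               // hypothetical SortOrder system value
            .setExpression("UPPER(V1)")    // new optional field 12
            .build();
        System.out.println(col.hasExpression() + " -> " + col.getExpression());
    }
}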

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index e3ee9e8..8a6b8d0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -150,6 +150,12 @@ public enum SQLExceptionCode {
      READ_ONLY_CONNECTION(518,"25502","Mutations are not permitted for a read-only connection."),
  
      VARBINARY_ARRAY_NOT_SUPPORTED(519, "42896", "VARBINARY ARRAY is not supported"),
+    
+     /**
+      *  Expression Index exceptions.
+      */
+     AGGREGATE_EXPRESSION_NOT_ALLOWED_IN_INDEX(520, "42897", "Aggregate expressions are not allowed in an index"),
+     NON_DETERMINISTIC_EXPRESSION_NOT_ALLOWED_IN_INDEX(521, "42898", "Non-deterministic expressions are not allowed in an index"),
 
      /** 
      * HBase and Phoenix specific implementation defined sub-classes.
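
A short sketch (not from this commit) of the client-side effect of the two new codes: attempting to index an aggregate (or non-deterministic) expression is rejected at CREATE INDEX time, and the SQLException carries the new error code. Names below are hypothetical.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class InvalidExpressionIndexSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE IF NOT EXISTS T (K VARCHAR PRIMARY KEY, V1 BIGINT)");
            try {
                // Aggregates cannot define an index row key.
                stmt.execute("CREATE INDEX BAD_IDX ON T (SUM(V1))");
            } catch (SQLException e) {
                System.out.println(e.getErrorCode());  // expected: 520 (SQLState 42897)
            }
        }
    }
}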

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java
index b58de50..94233c8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/BaseQueryPlan.java
@@ -258,7 +258,7 @@ public abstract class BaseQueryPlan implements QueryPlan {
             }
         }
         ImmutableBytesWritable ptr = new ImmutableBytesWritable();
-        IndexMaintainer.serialize(dataTable, ptr, indexes);
+        IndexMaintainer.serialize(dataTable, ptr, indexes, context.getConnection());
         scan.setAttribute(BaseScannerRegionObserver.LOCAL_INDEX_BUILD, ByteUtil.copyKeyBytesIfNecessary(ptr));
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
index cfa58fd..04626a6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/MutationState.java
@@ -225,7 +225,7 @@ public class MutationState implements SQLCloseable {
                 try {
                     indexMutations =
                             IndexUtil.generateIndexData(tableRef.getTable(), index, mutationsPertainingToIndex,
-                                tempPtr, connection.getKeyValueBuilder());
+                                tempPtr, connection.getKeyValueBuilder(), connection);
                 } catch (SQLException e) {
                     throw new IllegalDataException(e);
                 }
@@ -368,7 +368,7 @@ public class MutationState implements SQLCloseable {
             Map<ImmutableBytesPtr,Map<PColumn,byte[]>> valuesMap = entry.getValue();
             TableRef tableRef = entry.getKey();
             PTable table = tableRef.getTable();
-            table.getIndexMaintainers(tempPtr);
+            table.getIndexMaintainers(tempPtr, connection);
             boolean hasIndexMaintainers = tempPtr.getLength() > 0;
             boolean isDataTable = true;
             long serverTimestamp = serverTimeStamps[i++];

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/expression/CoerceExpression.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/CoerceExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/CoerceExpression.java
index 811ed47..b0396e8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/CoerceExpression.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/CoerceExpression.java
@@ -85,7 +85,7 @@ public class CoerceExpression extends BaseSingleExpression {
     @Override
     public int hashCode() {
         final int prime = 31;
-        int result = 1;
+        int result = super.hashCode();
         result = prime * result + ((maxLength == null) ? 0 : maxLength.hashCode());
         result = prime * result + ((toSortOrder == null) ? 0 : toSortOrder.hashCode());
         result = prime * result + ((toType == null) ? 0 : toType.hashCode());
@@ -95,14 +95,16 @@ public class CoerceExpression extends BaseSingleExpression {
     @Override
     public boolean equals(Object obj) {
         if (this == obj) return true;
-        if (obj == null) return false;
+        if (!super.equals(obj)) return false;
         if (getClass() != obj.getClass()) return false;
         CoerceExpression other = (CoerceExpression)obj;
         if (maxLength == null) {
             if (other.maxLength != null) return false;
         } else if (!maxLength.equals(other.maxLength)) return false;
         if (toSortOrder != other.toSortOrder) return false;
-        if (toType != other.toType) return false;
+        if (toType == null) {
+            if (other.toType != null) return false;
+        } else if (!toType.equals(other.toType)) return false;
         return true;
     }
 
@@ -122,7 +124,7 @@ public class CoerceExpression extends BaseSingleExpression {
         WritableUtils.writeVInt(output, toSortOrder.getSystemValue());
         WritableUtils.writeVInt(output, maxLength == null ? -1 : maxLength);
     }
-
+    
     @Override
     public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
         if (getChild().evaluate(tuple, ptr)) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/expression/RowKeyColumnExpression.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/RowKeyColumnExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/RowKeyColumnExpression.java
index 240d013..e4ec438 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/RowKeyColumnExpression.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/RowKeyColumnExpression.java
@@ -23,10 +23,10 @@ import java.io.IOException;
 
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.expression.visitor.ExpressionVisitor;
-import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.RowKeyValueAccessor;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.util.ByteUtil;
 
 
@@ -78,6 +78,10 @@ public class RowKeyColumnExpression  extends ColumnExpression {
     public int getPosition() {
         return accessor.getIndex();
     }
+    
+    public String getName() {
+        return name;
+    }
 
     @Override
     public int hashCode() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/ValueGetter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/ValueGetter.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/ValueGetter.java
index 0e321a7..a6e36cb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/ValueGetter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/ValueGetter.java
@@ -33,4 +33,6 @@ public interface ValueGetter {
    * @throws IOException if there is an error accessing the underlying data storage
    */
   public ImmutableBytesPtr getLatestValue(ColumnReference ref) throws IOException;
+  
+  public byte[] getRowKey();
 }
\ No newline at end of file
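
A brief, hedged illustration of the extended interface (the map-backed implementation and its names below are stand-ins, not Phoenix internals): besides the latest cell value per column reference, a ValueGetter now also exposes the data row key, which lets expression evaluation over pending updates (for example via ValueGetterTuple) see primary-key bytes as well as column values.

    import java.util.Map;

    import org.apache.phoenix.hbase.index.ValueGetter;
    import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
    import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;

    public class MapBackedValueGetter implements ValueGetter {
        private final Map<ColumnReference, ImmutableBytesPtr> latestValues;
        private final byte[] rowKey;

        public MapBackedValueGetter(Map<ColumnReference, ImmutableBytesPtr> latestValues, byte[] rowKey) {
            this.latestValues = latestValues;
            this.rowKey = rowKey;
        }

        @Override
        public ImmutableBytesPtr getLatestValue(ColumnReference ref) {
            // null means no value is known for this column reference
            return latestValues.get(ref);
        }

        @Override
        public byte[] getRowKey() {
            return rowKey;
        }
    }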

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/LazyValueGetter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/LazyValueGetter.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/LazyValueGetter.java
index 43c4028..96a7410 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/LazyValueGetter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/covered/data/LazyValueGetter.java
@@ -84,4 +84,9 @@ public class LazyValueGetter implements ValueGetter {
     }
     return null;
   }
+
+  @Override
+  public byte[] getRowKey() {
+	return this.row; 
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
index b4ba12d..dc72059 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/hbase/index/util/IndexManagementUtil.java
@@ -101,24 +101,6 @@ public class IndexManagementUtil {
 
     }
 
-    public static ValueGetter createGetterFromKeyValues(Collection<Cell> pendingUpdates) {
-        final Map<ReferencingColumn, ImmutableBytesPtr> valueMap = Maps.newHashMapWithExpectedSize(pendingUpdates
-                .size());
-        for (Cell kv : pendingUpdates) {
-            // create new pointers to each part of the kv
-            ImmutableBytesPtr family = new ImmutableBytesPtr(kv.getRowArray(),kv.getFamilyOffset(),kv.getFamilyLength());
-            ImmutableBytesPtr qual = new ImmutableBytesPtr(kv.getRowArray(), kv.getQualifierOffset(), kv.getQualifierLength());
-            ImmutableBytesPtr value = new ImmutableBytesPtr(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength());
-            valueMap.put(new ReferencingColumn(family, qual), value);
-        }
-        return new ValueGetter() {
-            @Override
-            public ImmutableBytesPtr getLatestValue(ColumnReference ref) throws IOException {
-                return valueMap.get(ReferencingColumn.wrap(ref));
-            }
-        };
-    }
-
     public static class ReferencingColumn {
         ImmutableBytesPtr family;
         ImmutableBytesPtr qual;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
index 61b6e68..31f6c76 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/IndexMaintainer.java
@@ -23,6 +23,7 @@ import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -42,15 +43,27 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.phoenix.compile.ColumnResolver;
+import org.apache.phoenix.compile.FromCompiler;
+import org.apache.phoenix.compile.IndexExpressionCompiler;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.ExpressionType;
+import org.apache.phoenix.expression.KeyValueColumnExpression;
+import org.apache.phoenix.expression.visitor.KeyValueExpressionVisitor;
 import org.apache.phoenix.hbase.index.ValueGetter;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
-import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.hbase.index.util.IndexManagementUtil.ReferencingColumn;
+import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixStatement;
+import org.apache.phoenix.parse.ParseNode;
+import org.apache.phoenix.parse.SQLParser;
 import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PColumnFamily;
-import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
@@ -59,8 +72,11 @@ import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.RowKeySchema;
 import org.apache.phoenix.schema.SaltingUtil;
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.ValueSchema;
 import org.apache.phoenix.schema.ValueSchema.Field;
+import org.apache.phoenix.schema.tuple.ValueGetterTuple;
+import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.util.BitSet;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.IndexUtil;
@@ -88,41 +104,15 @@ import com.google.common.collect.Sets;
  * @since 2.1.0
  */
 public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
-    
-    public static IndexMaintainer create(PTable dataTable, PTable index) {
+
+    private static final int EXPRESSION_NOT_PRESENT = -1;
+    private static final int ESTIMATED_EXPRESSION_SIZE = 8;
+
+	public static IndexMaintainer create(PTable dataTable, PTable index, PhoenixConnection connection) {
         if (dataTable.getType() == PTableType.INDEX || index.getType() != PTableType.INDEX || !dataTable.getIndexes().contains(index)) {
             throw new IllegalArgumentException();
         }
-        IndexMaintainer maintainer = new IndexMaintainer(dataTable, index);
-        int indexPosOffset = (index.getBucketNum() == null ? 0 : 1) + (maintainer.isMultiTenant ? 1 : 0) + (maintainer.viewIndexId == null ? 0 : 1);
-        RowKeyMetaData rowKeyMetaData = maintainer.getRowKeyMetaData();
-        int indexColByteSize = 0;
-        for (int i = indexPosOffset; i < index.getPKColumns().size(); i++) {
-            PColumn indexColumn = index.getPKColumns().get(i);
-            int indexPos = i - indexPosOffset;
-            PColumn column = IndexUtil.getDataColumn(dataTable, indexColumn.getName().getString());
-            boolean isPKColumn = SchemaUtil.isPKColumn(column);
-            if (isPKColumn) {
-                int dataPkPos = dataTable.getPKColumns().indexOf(column) - (dataTable.getBucketNum() == null ? 0 : 1) - (maintainer.isMultiTenant ? 1 : 0);
-                rowKeyMetaData.setIndexPkPosition(dataPkPos, indexPos);
-            } else {
-                indexColByteSize += column.getDataType().isFixedWidth() ? SchemaUtil.getFixedByteSize(column) : ValueSchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
-                maintainer.getIndexedColumnTypes().add(column.getDataType());
-                maintainer.getIndexedColumns().add(new ColumnReference(column.getFamilyName().getBytes(), column.getName().getBytes()));
-            }
-            if (indexColumn.getSortOrder() == SortOrder.DESC) {
-                rowKeyMetaData.getDescIndexColumnBitSet().set(indexPos);
-            }
-        }
-        for (int i = 0; i < index.getColumnFamilies().size(); i++) {
-            PColumnFamily family = index.getColumnFamilies().get(i);
-            for (PColumn indexColumn : family.getColumns()) {
-                PColumn column = IndexUtil.getDataColumn(dataTable, indexColumn.getName().getString());
-                maintainer.getCoverededColumns().add(new ColumnReference(column.getFamilyName().getBytes(), column.getName().getBytes()));
-            }
-        }
-        maintainer.estimatedIndexRowKeyBytes = maintainer.estimateIndexRowKeyByteSize(indexColByteSize);
-        maintainer.initCachedState();
+        IndexMaintainer maintainer = new IndexMaintainer(dataTable, index, connection);
         return maintainer;
     }
     
@@ -158,9 +148,9 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
      * @param dataTable data table
      * @param ptr bytes pointer to hold returned serialized value
      */
-    public static void serialize(PTable dataTable, ImmutableBytesWritable ptr) {
+    public static void serialize(PTable dataTable, ImmutableBytesWritable ptr, PhoenixConnection connection) {
         List<PTable> indexes = dataTable.getIndexes();
-        serialize(dataTable, ptr, indexes);
+        serialize(dataTable, ptr, indexes, connection);
     }
 
     /**
@@ -170,7 +160,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
      * @param indexes indexes to serialize
      */
     public static void serialize(PTable dataTable, ImmutableBytesWritable ptr,
-            List<PTable> indexes) {
+            List<PTable> indexes, PhoenixConnection connection) {
         Iterator<PTable> indexesItr = nonDisabledIndexIterator(indexes.iterator());
         if ((dataTable.isImmutableRows()) || !indexesItr.hasNext()) {
             indexesItr = enabledLocalIndexIterator(indexesItr);
@@ -184,7 +174,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         while (indexesItr.hasNext()) {
             nIndexes++;
             PTable index = indexesItr.next();
-            estimatedSize += index.getIndexMaintainer(dataTable).getEstimatedByteSize();
+            estimatedSize += index.getIndexMaintainer(dataTable, connection).getEstimatedByteSize();
         }
         TrustedByteArrayOutputStream stream = new TrustedByteArrayOutputStream(estimatedSize + 1);
         DataOutput output = new DataOutputStream(stream);
@@ -197,7 +187,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
                     dataTable.isImmutableRows() ? enabledLocalIndexIterator(indexes.iterator())
                             : nonDisabledIndexIterator(indexes.iterator());
             while (indexesItr.hasNext()) {
-                    indexesItr.next().getIndexMaintainer(dataTable).write(output);
+                    indexesItr.next().getIndexMaintainer(dataTable, connection).write(output);
             }
         } catch (IOException e) {
             throw new RuntimeException(e); // Impossible
@@ -238,9 +228,14 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
 
     private byte[] viewIndexId;
     private boolean isMultiTenant;
+    // indexed expressions that are not present in the row key of the data table; an expression can also refer to a regular column
+    private List<Expression> indexedExpressions;
+    // columns required to evaluate all expressions in indexedExpressions (this does not include columns in the data row key)
     private Set<ColumnReference> indexedColumns;
     private Set<ColumnReference> coveredColumns;
+    // columns required to create the index row, i.e. indexedColumns + coveredColumns (this does not include columns in the data row key)
     private Set<ColumnReference> allColumns;
+    // TODO remove this in the next major release
     private List<PDataType> indexedColumnTypes;
     private RowKeyMetaData rowKeyMetaData;
     private byte[] indexTableName;
@@ -258,6 +253,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
     
     private List<ImmutableBytesPtr> indexQualifiers;
     private int estimatedIndexRowKeyBytes;
+    private int estimatedExpressionSize;
     private int[] dataPkPosition;
     private int maxTrailingNulls;
     private ColumnReference dataEmptyKeyValueRef;
@@ -267,32 +263,43 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         this.isDataTableSalted = isDataTableSalted;
     }
 
-    private IndexMaintainer(PTable dataTable, PTable index) {
+    private IndexMaintainer(PTable dataTable, PTable index, PhoenixConnection connection) {
         this(dataTable.getRowKeySchema(), dataTable.getBucketNum() != null);
         this.isMultiTenant = dataTable.isMultiTenant();
         this.viewIndexId = index.getViewIndexId() == null ? null : MetaDataUtil.getViewIndexIdDataType().toBytes(index.getViewIndexId());
         this.isLocalIndex = index.getIndexType() == IndexType.LOCAL;
 
-        RowKeySchema dataRowKeySchema = dataTable.getRowKeySchema();
-        boolean isDataTableSalted = dataTable.getBucketNum() != null;
         byte[] indexTableName = index.getPhysicalName().getBytes();
         // Use this for the nDataSaltBuckets as we need this for local indexes
         // TODO: persist nDataSaltBuckets separately, but maintain b/w compat.
         Integer nIndexSaltBuckets = isLocalIndex ? dataTable.getBucketNum() : index.getBucketNum();
         boolean indexWALDisabled = index.isWALDisabled();
         int indexPosOffset = (index.getBucketNum() == null ? 0 : 1) + (this.isMultiTenant ? 1 : 0) + (this.viewIndexId == null ? 0 : 1);
+//        int indexPosOffset = !isLocalIndex && nIndexSaltBuckets > 0 ? 1 : 0;
         int nIndexColumns = index.getColumns().size() - indexPosOffset;
         int nIndexPKColumns = index.getPKColumns().size() - indexPosOffset;
-        int indexedColumnsCount = 0;
-        for (int i  = indexPosOffset; i<index.getPKColumns().size();i++) {
-            PColumn indexColumn = index.getPKColumns().get(i);
-            PColumn column = IndexUtil.getDataColumn(dataTable, indexColumn.getName().getString());
-            boolean isPKColumn = SchemaUtil.isPKColumn(column);
-            if (!isPKColumn) {
-                indexedColumnsCount++;
-            } 
+        // number of indexed expressions that are not present in the row key of the data table
+        int indexedExpressionCount = 0;
+        for (int i = indexPosOffset; i<index.getPKColumns().size();i++) {
+        	PColumn indexColumn = index.getPKColumns().get(i);
+        	if (!IndexUtil.isIndexColumn(indexColumn)) {
+                continue;
+            }
+        	String indexColumnName = indexColumn.getName().getString();
+            String dataFamilyName = IndexUtil.getDataColumnFamilyName(indexColumnName);
+            String dataColumnName = IndexUtil.getDataColumnName(indexColumnName);
+            try {
+                PColumn dataColumn = dataFamilyName.equals("") ? dataTable.getColumn(dataColumnName) : dataTable.getColumnFamily(dataFamilyName).getColumn(dataColumnName);
+                if (SchemaUtil.isPKColumn(dataColumn)) 
+                    continue;
+            } catch (ColumnNotFoundException e) {
+             // This column must be an expression
+            } catch (Exception e) {
+                throw new IllegalArgumentException(e);
+            }
+            indexedExpressionCount++;
         }
-        int indexPkColumnCount = this.dataRowKeySchema.getFieldCount() + indexedColumnsCount - (isDataTableSalted ? 1 : 0) - (isMultiTenant ? 1 : 0);
+        int indexPkColumnCount = this.dataRowKeySchema.getFieldCount() + indexedExpressionCount  - (this.isDataTableSalted ? 1 : 0) - (this.isMultiTenant ? 1 : 0);
         this.rowKeyMetaData = newRowKeyMetaData(indexPkColumnCount);
         BitSet bitSet = this.rowKeyMetaData.getViewConstantColumnBitSet();
 
@@ -312,12 +319,9 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
             }
         }
         this.indexTableName = indexTableName;
-        this.indexedColumns = Sets.newLinkedHashSetWithExpectedSize(nIndexPKColumns-nDataPKColumns);
         this.indexedColumnTypes = Lists.<PDataType>newArrayListWithExpectedSize(nIndexPKColumns-nDataPKColumns);
+        this.indexedExpressions = Lists.newArrayListWithExpectedSize(nIndexPKColumns-nDataPKColumns);
         this.coveredColumns = Sets.newLinkedHashSetWithExpectedSize(nIndexColumns-nIndexPKColumns);
-        this.allColumns = Sets.newLinkedHashSetWithExpectedSize(nDataPKColumns + nIndexColumns);
-        this.allColumns.addAll(indexedColumns);
-        this.allColumns.addAll(coveredColumns);
         this.nIndexSaltBuckets  = nIndexSaltBuckets == null ? 0 : nIndexSaltBuckets;
         this.dataEmptyKeyValueCF = SchemaUtil.getEmptyColumnFamily(dataTable);
         this.emptyKeyValueCFPtr = SchemaUtil.getEmptyColumnFamilyPtr(index);
@@ -326,6 +330,60 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         // TODO: check whether index is immutable or not. Currently it's always false so checking
         // data table is with immutable rows or not.
         this.immutableRows = dataTable.isImmutableRows();
+        int indexColByteSize = 0;
+        ColumnResolver resolver = null;
+        try {
+            resolver = FromCompiler.getResolver(new TableRef(dataTable));
+        } catch (SQLException e) {
+            throw new RuntimeException(e); // Impossible
+        }
+        StatementContext context = new StatementContext(new PhoenixStatement(connection), resolver);
+        IndexExpressionCompiler expressionIndexCompiler = new IndexExpressionCompiler(context);
+        for (int i = indexPosOffset; i < index.getPKColumns().size(); i++) {
+            PColumn indexColumn = index.getPKColumns().get(i);
+            if (!IndexUtil.isIndexColumn(indexColumn)) {
+                continue;
+            }
+            int indexPos = i - indexPosOffset;
+            Expression expression = null;
+            try {
+                expressionIndexCompiler.reset();
+                ParseNode parseNode  = SQLParser.parseCondition(indexColumn.getExpressionStr());
+                expression = parseNode.accept(expressionIndexCompiler);
+            } catch (SQLException e) {
+                throw new RuntimeException(e); // Impossible
+            }
+            if ( expressionIndexCompiler.getColumnRef()!=null ) {
+            	// get the column of the data table that corresponds to this index column
+	            PColumn column = IndexUtil.getDataColumn(dataTable, indexColumn.getName().getString());
+	            boolean isPKColumn = SchemaUtil.isPKColumn(column);
+	            if (isPKColumn) {
+	                int dataPkPos = dataTable.getPKColumns().indexOf(column) - (dataTable.getBucketNum() == null ? 0 : 1) - (this.isMultiTenant ? 1 : 0);
+	                this.rowKeyMetaData.setIndexPkPosition(dataPkPos, indexPos);
+	            } else {
+	                indexColByteSize += column.getDataType().isFixedWidth() ? SchemaUtil.getFixedByteSize(column) : ValueSchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
+	                this.indexedExpressions.add(expression);
+	            }
+            }
+            else {
+            	indexColByteSize += expression.getDataType().isFixedWidth() ? SchemaUtil.getFixedByteSize(expression) : ValueSchema.ESTIMATED_VARIABLE_LENGTH_SIZE;
+                this.indexedExpressions.add(expression);
+            }
+            // set the sort order of the expression correctly
+            if (indexColumn.getSortOrder() == SortOrder.DESC) {
+                this.rowKeyMetaData.getDescIndexColumnBitSet().set(indexPos);
+            }
+        }
+        this.estimatedExpressionSize = expressionIndexCompiler.getTotalNodeCount() * ESTIMATED_EXPRESSION_SIZE;
+        for (int i = 0; i < index.getColumnFamilies().size(); i++) {
+            PColumnFamily family = index.getColumnFamilies().get(i);
+            for (PColumn indexColumn : family.getColumns()) {
+                PColumn column = IndexUtil.getDataColumn(dataTable, indexColumn.getName().getString());
+                this.coveredColumns.add(new ColumnReference(column.getFamilyName().getBytes(), column.getName().getBytes()));
+            }
+        }
+        this.estimatedIndexRowKeyBytes = estimateIndexRowKeyByteSize(indexColByteSize);
+        initCachedState();
     }
 
     public byte[] buildRowKey(ValueGetter valueGetter, ImmutableBytesWritable rowKeyPtr, byte[] regionStartKey, byte[] regionEndKey)  {
@@ -388,30 +446,26 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
                 } 
             }
             BitSet descIndexColumnBitSet = rowKeyMetaData.getDescIndexColumnBitSet();
-            int j = 0;
-            Iterator<ColumnReference> iterator = indexedColumns.iterator();
+            Iterator<Expression> expressionIterator = indexedExpressions.iterator();
             for (int i = 0; i < nIndexedColumns; i++) {
                 PDataType dataColumnType;
-                boolean isNullable = true;
-                boolean isDataColumnInverted = false;
-                SortOrder dataSortOrder = SortOrder.getDefault();
-                if (dataPkPosition[i] == -1) {
-                    dataColumnType = indexedColumnTypes.get(j);
-                    ImmutableBytesPtr value = valueGetter.getLatestValue(iterator.next());
-                    if (value == null) {
-                        ptr.set(ByteUtil.EMPTY_BYTE_ARRAY);
-                    } else {
-                        ptr.set(value.copyBytesIfNecessary());
-                    }
-                    j++;
-               } else {
+                boolean isNullable;
+                SortOrder dataSortOrder;
+                if (dataPkPosition[i] == EXPRESSION_NOT_PRESENT) {
+                	Expression expression = expressionIterator.next();
+                	dataColumnType = expression.getDataType();
+                	dataSortOrder = expression.getSortOrder();
+                    isNullable = expression.isNullable();
+                	expression.evaluate(new ValueGetterTuple(valueGetter), ptr);
+                }
+                else {
                     Field field = dataRowKeySchema.getField(dataPkPosition[i]);
                     dataColumnType = field.getDataType();
                     ptr.set(rowKeyPtr.get(), dataRowKeyLocator[0][i], dataRowKeyLocator[1][i]);
                     dataSortOrder = field.getSortOrder();
-                    isDataColumnInverted = dataSortOrder != SortOrder.ASC;
                     isNullable = field.isNullable();
                 }
+                boolean isDataColumnInverted = dataSortOrder != SortOrder.ASC;
                 PDataType indexColumnType = IndexUtil.getIndexColumnDataType(isNullable, dataColumnType);
                 boolean isBytesComparable = dataColumnType.isBytesComparableWith(indexColumnType) ;
                 if (isBytesComparable && isDataColumnInverted == descIndexColumnBitSet.get(i)) {
@@ -643,10 +697,10 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
                 indexFields[pos] = dataRowKeySchema.getField(i);
             } 
         }
-        int indexedColumnTypesIndex = 0;
+        Iterator<Expression> expressionSetItr = indexedExpressions.iterator();
         for (Field indexField : indexFields) {
             if (indexField == null) { // Add field for kv column in index
-                final PDataType dataType = indexedColumnTypes.get(indexedColumnTypesIndex++);
+                final PDataType dataType = expressionSetItr.next().getDataType();
                 builder.addField(new PDatum() {
 
                     @Override
@@ -823,10 +877,6 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         return coveredColumns;
     }
 
-    public Set<ColumnReference> getIndexedColumns() {
-        return indexedColumns;
-    }
-
     public Set<ColumnReference> getAllColumns() {
         return allColumns;
     }
@@ -838,14 +888,6 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         return emptyKeyValueCFPtr;
     }
 
-    private RowKeyMetaData getRowKeyMetaData() {
-        return rowKeyMetaData;
-    }
-    
-    private List<PDataType> getIndexedColumnTypes() {
-        return indexedColumnTypes;
-    }
-
     @Override
     public void readFields(DataInput input) throws IOException {
         int encodedIndexSaltBucketsAndMultiTenant = WritableUtils.readVInt(input);
@@ -881,7 +923,62 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         }
         indexTableName = Bytes.readByteArray(input);
         dataEmptyKeyValueCF = Bytes.readByteArray(input);
-        emptyKeyValueCFPtr = new ImmutableBytesPtr(Bytes.readByteArray(input));
+        int len = WritableUtils.readVInt(input);
+        //TODO remove this in the next major release
+        boolean isNewClient = false;
+        if (len < 0) {
+          isNewClient = true;
+          len=Math.abs(len);
+        }
+        byte [] emptyKeyValueCF = new byte[len];
+        input.readFully(emptyKeyValueCF, 0, len);
+        emptyKeyValueCFPtr = new ImmutableBytesPtr(emptyKeyValueCF);
+        
+        if (isNewClient) {
+            int numIndexedExpressions = WritableUtils.readVInt(input);
+            indexedExpressions = Lists.newArrayListWithExpectedSize(numIndexedExpressions);        
+            for (int i = 0; i < numIndexedExpressions; i++) {
+            	Expression expression = ExpressionType.values()[WritableUtils.readVInt(input)].newInstance();
+            	expression.readFields(input);
+            	indexedExpressions.add(expression);
+            }
+        }
+        else {
+            indexedExpressions = Lists.newArrayListWithExpectedSize(indexedColumns.size());
+            Iterator<ColumnReference> colReferenceIter = indexedColumns.iterator();
+            Iterator<PDataType> dataTypeIter = indexedColumnTypes.iterator();
+            while (colReferenceIter.hasNext()) {
+                ColumnReference colRef = colReferenceIter.next();
+                final PDataType dataType = dataTypeIter.next();
+                indexedExpressions.add(new KeyValueColumnExpression(new PDatum() {
+                    
+                    @Override
+                    public boolean isNullable() {
+                        return true;
+                    }
+                    
+                    @Override
+                    public SortOrder getSortOrder() {
+                        return SortOrder.getDefault();
+                    }
+                    
+                    @Override
+                    public Integer getScale() {
+                        return null;
+                    }
+                    
+                    @Override
+                    public Integer getMaxLength() {
+                        return null;
+                    }
+                    
+                    @Override
+                    public PDataType getDataType() {
+                        return dataType;
+                    }
+                }, colRef.getFamily(), colRef.getQualifier()));
+            }
+        }
         
         rowKeyMetaData = newRowKeyMetaData();
         rowKeyMetaData.readFields(input);
@@ -908,6 +1005,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
             Bytes.writeByteArray(output, ref.getFamily());
             Bytes.writeByteArray(output, ref.getQualifier());
         }
+        //TODO remove indexedColumnTypes in the next major release
         for (int i = 0; i < indexedColumnTypes.size(); i++) {
             PDataType type = indexedColumnTypes.get(i);
             WritableUtils.writeVInt(output, type.ordinal());
@@ -920,9 +1018,17 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         }
         Bytes.writeByteArray(output, indexTableName);
         Bytes.writeByteArray(output, dataEmptyKeyValueCF);
-        WritableUtils.writeVInt(output,emptyKeyValueCFPtr.getLength());
+        // TODO: to maintain b/w compatibility, encode emptyKeyValueCFPtr.getLength() as a negative value (so we can distinguish between new and old clients);
+        // remove this when indexedColumnTypes is removed
+        WritableUtils.writeVInt(output,-emptyKeyValueCFPtr.getLength());
         output.write(emptyKeyValueCFPtr.get(),emptyKeyValueCFPtr.getOffset(), emptyKeyValueCFPtr.getLength());
         
+        WritableUtils.writeVInt(output, indexedExpressions.size());
+        for (Expression expression : indexedExpressions) {
+        	WritableUtils.writeVInt(output, ExpressionType.valueOf(expression).ordinal());
+        	expression.write(output);
+        }
+        
         rowKeyMetaData.write(output);
         // Encode indexWALDisabled in nDataCFs
         WritableUtils.writeVInt(output, (nDataCFs + 1) * (indexWALDisabled ? -1 : 1));
@@ -941,7 +1047,10 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
             size += WritableUtils.getVIntSize(ref.getQualifier().length);
             size += ref.getQualifier().length;
         }
-        size += indexedColumnTypes.size();
+        for (int i = 0; i < indexedColumnTypes.size(); i++) {
+            PDataType type = indexedColumnTypes.get(i);
+            size += WritableUtils.getVIntSize(type.ordinal());
+        }
         size += WritableUtils.getVIntSize(coveredColumns.size());
         for (ColumnReference ref : coveredColumns) {
             size += WritableUtils.getVIntSize(ref.getFamily().length);
@@ -954,13 +1063,18 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         size += dataEmptyKeyValueCF.length + WritableUtils.getVIntSize(dataEmptyKeyValueCF.length);
         size += emptyKeyValueCFPtr.getLength() + WritableUtils.getVIntSize(emptyKeyValueCFPtr.getLength());
         size += WritableUtils.getVIntSize(nDataCFs+1);
+        size += WritableUtils.getVIntSize(indexedExpressions.size());
+        for (Expression expression : indexedExpressions) {
+            size += WritableUtils.getVIntSize(ExpressionType.valueOf(expression).ordinal());
+        }
+        size += estimatedExpressionSize;
         return size;
     }
     
     private int estimateIndexRowKeyByteSize(int indexColByteSize) {
         int estimatedIndexRowKeyBytes = indexColByteSize + dataRowKeySchema.getEstimatedValueLength() + (nIndexSaltBuckets == 0 || isLocalIndex || this.isDataTableSalted ? 0 : SaltingUtil.NUM_SALTING_BYTES);
         return estimatedIndexRowKeyBytes;
-   }
+    }
     
     /**
      * Init calculated state reading/creating
@@ -976,20 +1090,33 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
                 ref.getFamily(), ref.getQualifier())));
         }
 
-        this.allColumns = Sets.newLinkedHashSetWithExpectedSize(indexedColumns.size() + coveredColumns.size());
+        this.allColumns = Sets.newLinkedHashSetWithExpectedSize(indexedExpressions.size() + coveredColumns.size());
+        // columns that are required to evaluate all expressions in indexedExpressions (not including columns in data row key)
+        this.indexedColumns = Sets.newLinkedHashSetWithExpectedSize(indexedExpressions.size());
+        for (Expression expression : indexedExpressions) {
+        	KeyValueExpressionVisitor visitor = new KeyValueExpressionVisitor() {
+                @Override
+                public Void visit(KeyValueColumnExpression expression) {
+                	indexedColumns.add(new ColumnReference(expression.getColumnFamily(), expression.getColumnName()));
+                    indexedColumnTypes.add(expression.getDataType());
+                    return null;
+                }
+            };
+            expression.accept(visitor);
+        }
         allColumns.addAll(indexedColumns);
         allColumns.addAll(coveredColumns);
         
         int dataPkOffset = (isDataTableSalted ? 1 : 0) + (isMultiTenant ? 1 : 0);
         int nIndexPkColumns = getIndexPkColumnCount();
         dataPkPosition = new int[nIndexPkColumns];
-        Arrays.fill(dataPkPosition, -1);
+        Arrays.fill(dataPkPosition, EXPRESSION_NOT_PRESENT);
         int numViewConstantColumns = 0;
         BitSet viewConstantColumnBitSet = rowKeyMetaData.getViewConstantColumnBitSet();
         for (int i = dataPkOffset; i < dataRowKeySchema.getFieldCount(); i++) {
             if (!viewConstantColumnBitSet.get(i)) {
-                int dataPkPosition = rowKeyMetaData.getIndexPkPosition(i-dataPkOffset);
-                this.dataPkPosition[dataPkPosition] = i;
+                int indexPkPosition = rowKeyMetaData.getIndexPkPosition(i-dataPkOffset);
+                this.dataPkPosition[indexPkPosition] = i;
             } else {
                 numViewConstantColumns++;
             }
@@ -998,15 +1125,15 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         // Calculate the max number of trailing nulls that we should get rid of after building the index row key.
         // We only get rid of nulls for variable length types, so we have to be careful to consider the type of the
         // index table, not the data type of the data table
-        int indexedColumnTypesPos = indexedColumnTypes.size()-1;
+        int expressionsPos = indexedExpressions.size();
         int indexPkPos = nIndexPkColumns - numViewConstantColumns - 1;
         while (indexPkPos >= 0) {
             int dataPkPos = dataPkPosition[indexPkPos];
             boolean isDataNullable;
             PDataType dataType;
-            if (dataPkPos == -1) {
+            if (dataPkPos == EXPRESSION_NOT_PRESENT) {
                 isDataNullable = true;
-                dataType = indexedColumnTypes.get(indexedColumnTypesPos--);
+                dataType = indexedExpressions.get(--expressionsPos).getDataType();
             } else {
                 Field dataField = dataRowKeySchema.getField(dataPkPos);
                 dataType = dataField.getDataType();
@@ -1022,7 +1149,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
     }
 
     private int getIndexPkColumnCount() {
-        return dataRowKeySchema.getFieldCount() + indexedColumns.size() - (isDataTableSalted ? 1 : 0) - (isMultiTenant ? 1 : 0);
+        return dataRowKeySchema.getFieldCount() + indexedExpressions.size() - (isDataTableSalted ? 1 : 0) - (isMultiTenant ? 1 : 0);
     }
     
     private RowKeyMetaData newRowKeyMetaData() {
@@ -1178,7 +1305,7 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         return allColumns.iterator();
     }
 
-    public ValueGetter createGetterFromKeyValues(Collection<? extends Cell> pendingUpdates) {
+    public ValueGetter createGetterFromKeyValues(final byte[] rowKey, Collection<? extends Cell> pendingUpdates) {
         final Map<ReferencingColumn, ImmutableBytesPtr> valueMap = Maps.newHashMapWithExpectedSize(pendingUpdates
                 .size());
         for (Cell kv : pendingUpdates) {
@@ -1190,10 +1317,14 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
         }
         return new ValueGetter() {
             @Override
-            public ImmutableBytesPtr getLatestValue(ColumnReference ref) throws IOException {
+            public ImmutableBytesPtr getLatestValue(ColumnReference ref) {
                 if(ref.equals(dataEmptyKeyValueRef)) return null;
                 return valueMap.get(ReferencingColumn.wrap(ref));
             }
+            @Override
+            public byte[] getRowKey() {
+            	return rowKey;
+            }
         };
     }
 
@@ -1208,4 +1339,8 @@ public class IndexMaintainer implements Writable, Iterable<ColumnReference> {
     public boolean isImmutableRows() {
         return immutableRows;
     }
+    
+    public Set<ColumnReference> getIndexedColumns() {
+        return indexedColumns;
+    }
 }
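
As a side note on the wire-format change in write()/readFields() above, here is a minimal, self-contained sketch of the backward-compatibility trick (it mirrors the hunks above but is not Phoenix API itself; the class and method names are illustrative). A new writer encodes the empty column family length as a negative vint so a reader can tell whether a trailing list of serialized expressions follows, while payloads from old writers (non-negative length, no expression list) still deserialize.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.DataOutput;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.WritableUtils;

    public class NegativeLengthMarkerSketch {
        static byte[] write(byte[] emptyCf, int numExpressions) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutput out = new DataOutputStream(bytes);
            WritableUtils.writeVInt(out, -emptyCf.length); // negative length marks the new format
            out.write(emptyCf);
            WritableUtils.writeVInt(out, numExpressions);  // expression count only exists in the new format
            return bytes.toByteArray();
        }

        static void read(byte[] payload) throws IOException {
            DataInput in = new DataInputStream(new ByteArrayInputStream(payload));
            int len = WritableUtils.readVInt(in);
            boolean isNewFormat = len < 0;                 // old writers never wrote a negative length
            len = Math.abs(len);
            byte[] emptyCf = new byte[len];
            in.readFully(emptyCf, 0, len);
            int numExpressions = isNewFormat ? WritableUtils.readVInt(in) : 0;
            System.out.println("newFormat=" + isNewFormat + ", expressions=" + numExpressions);
        }

        public static void main(String[] args) throws IOException {
            read(write("0".getBytes(), 2));
        }
    }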

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexCodec.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexCodec.java b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexCodec.java
index 48a7868..99e26d1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexCodec.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexCodec.java
@@ -154,7 +154,7 @@ public class PhoenixIndexCodec extends BaseIndexCodec {
                 } else {
                     indexUpdate.setTable(maintainer.getIndexTableName());
                 }
-                valueGetter = maintainer.createGetterFromKeyValues(state.getPendingUpdate());
+                valueGetter = maintainer.createGetterFromKeyValues(dataRowKey, state.getPendingUpdate());
             } else {
                 // TODO: if more efficient, I could do this just once with all columns in all indexes
                 Pair<Scanner,IndexUpdate> statePair = state.getIndexedColumnsTableState(maintainer.getAllColumns());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexFailurePolicy.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexFailurePolicy.java b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexFailurePolicy.java
index 1c98c5c..2fd168a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexFailurePolicy.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/index/PhoenixIndexFailurePolicy.java
@@ -19,7 +19,13 @@ package org.apache.phoenix.index;
 
 import java.io.IOException;
 import java.sql.SQLException;
-import java.util.*;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -37,28 +43,27 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
-
-import com.google.common.collect.Multimap;
-
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
-import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
-import org.apache.phoenix.hbase.index.write.KillServerOnFailurePolicy;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService;
 import org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest;
+import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
+import org.apache.phoenix.hbase.index.write.KillServerOnFailurePolicy;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.protobuf.ProtobufUtil;
 import org.apache.phoenix.schema.PIndexState;
-import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.SchemaUtil;
 
+import com.google.common.collect.Multimap;
+
 /**
  * 
  * Handler called in the event that index updates cannot be written to their
@@ -219,7 +224,7 @@ public class PhoenixIndexFailurePolicy extends  KillServerOnFailurePolicy {
                 return Collections.emptySet();
             }
 
-            IndexMaintainer indexMaintainer = localIndex.getIndexMaintainer(dataTable);
+            IndexMaintainer indexMaintainer = localIndex.getIndexMaintainer(dataTable, conn);
             HRegionInfo regionInfo = this.env.getRegion().getRegionInfo();
             int offset =
                     regionInfo.getStartKey().length == 0 ? regionInfo.getEndKey().length

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
index 76a1ad1..b26f408 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
@@ -166,6 +166,7 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData, org.apache.pho
     public static final String NULLABLE = "NULLABLE";
     public static final byte[] NULLABLE_BYTES = Bytes.toBytes(NULLABLE);
     public static final String COLUMN_DEF = "COLUMN_DEF";
+    public static final byte[] COLUMN_DEF_BYTES = Bytes.toBytes(COLUMN_DEF);
     public static final String SQL_DATA_TYPE = "SQL_DATA_TYPE";
     public static final String SQL_DATETIME_SUB = "SQL_DATETIME_SUB";
     public static final String CHAR_OCTET_LENGTH = "CHAR_OCTET_LENGTH";

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
index 93212bc..4ca5bb5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
@@ -81,6 +81,7 @@ import org.apache.phoenix.parse.DropTableStatement;
 import org.apache.phoenix.parse.ExplainStatement;
 import org.apache.phoenix.parse.FilterableStatement;
 import org.apache.phoenix.parse.HintNode;
+import org.apache.phoenix.parse.IndexKeyConstraint;
 import org.apache.phoenix.parse.LimitNode;
 import org.apache.phoenix.parse.NamedNode;
 import org.apache.phoenix.parse.NamedTableNode;
@@ -521,9 +522,9 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
 
     private static class ExecutableCreateIndexStatement extends CreateIndexStatement implements CompilableStatement {
 
-        public ExecutableCreateIndexStatement(NamedNode indexName, NamedTableNode dataTable, PrimaryKeyConstraint pkConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
+        public ExecutableCreateIndexStatement(NamedNode indexName, NamedTableNode dataTable, IndexKeyConstraint ikConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
                 ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType, int bindCount) {
-            super(indexName, dataTable, pkConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
+            super(indexName, dataTable, ikConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
         }
 
         @SuppressWarnings("unchecked")
@@ -852,9 +853,9 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
         }
         
         @Override
-        public CreateIndexStatement createIndex(NamedNode indexName, NamedTableNode dataTable, PrimaryKeyConstraint pkConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
+        public CreateIndexStatement createIndex(NamedNode indexName, NamedTableNode dataTable, IndexKeyConstraint ikConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
                 ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType, int bindCount) {
-            return new ExecutableCreateIndexStatement(indexName, dataTable, pkConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
+            return new ExecutableCreateIndexStatement(indexName, dataTable, ikConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
         }
         
         @Override


[4/4] phoenix git commit: PHOENIX-514 Support functional indexes (Thomas D'Silva)

Posted by ja...@apache.org.
PHOENIX-514 Support functional indexes (Thomas D'Silva)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8c340f5a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8c340f5a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8c340f5a

Branch: refs/heads/master
Commit: 8c340f5a6c030d009a4fa75444096a1999aea5c5
Parents: d6e7846
Author: James Taylor <ja...@apache.org>
Authored: Wed Feb 4 12:30:51 2015 -0800
Committer: James Taylor <ja...@apache.org>
Committed: Wed Feb 4 12:30:51 2015 -0800

----------------------------------------------------------------------
 .../org/apache/phoenix/end2end/HashJoinIT.java  |  12 +-
 .../apache/phoenix/end2end/PercentileIT.java    |   7 +-
 .../java/org/apache/phoenix/end2end/ViewIT.java |   2 +-
 .../phoenix/end2end/index/ImmutableIndexIT.java |  76 +-
 .../end2end/index/IndexExpressionIT.java        | 866 +++++++++++++++++++
 .../phoenix/end2end/index/IndexMetadataIT.java  |  20 +-
 phoenix-core/src/main/antlr3/PhoenixSQL.g       |  32 +-
 .../IndexHalfStoreFileReaderGenerator.java      |   4 +-
 .../apache/phoenix/compile/DeleteCompiler.java  |   4 +-
 .../phoenix/compile/ExpressionCompiler.java     |   9 +-
 .../apache/phoenix/compile/FromCompiler.java    |   6 +-
 .../apache/phoenix/compile/HavingCompiler.java  |   4 +-
 .../compile/IndexExpressionCompiler.java        |  53 ++
 .../apache/phoenix/compile/JoinCompiler.java    |   8 +-
 .../phoenix/compile/PostIndexDDLCompiler.java   |  47 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   2 +-
 .../apache/phoenix/compile/WhereCompiler.java   |   4 +-
 .../coprocessor/MetaDataEndpointImpl.java       |  44 +-
 .../UngroupedAggregateRegionObserver.java       |   3 +-
 .../coprocessor/generated/PTableProtos.java     | 215 ++++-
 .../phoenix/exception/SQLExceptionCode.java     |   6 +
 .../apache/phoenix/execute/BaseQueryPlan.java   |   2 +-
 .../apache/phoenix/execute/MutationState.java   |   4 +-
 .../phoenix/expression/CoerceExpression.java    |  10 +-
 .../expression/RowKeyColumnExpression.java      |   6 +-
 .../apache/phoenix/hbase/index/ValueGetter.java |   2 +
 .../index/covered/data/LazyValueGetter.java     |   5 +
 .../hbase/index/util/IndexManagementUtil.java   |  18 -
 .../apache/phoenix/index/IndexMaintainer.java   | 333 ++++---
 .../apache/phoenix/index/PhoenixIndexCodec.java |   2 +-
 .../index/PhoenixIndexFailurePolicy.java        |  21 +-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   1 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   9 +-
 .../apache/phoenix/optimize/QueryOptimizer.java |  12 +-
 .../apache/phoenix/parse/BetweenParseNode.java  |  22 +
 .../org/apache/phoenix/parse/BindParseNode.java |  22 +
 .../org/apache/phoenix/parse/CastParseNode.java |  38 +
 .../org/apache/phoenix/parse/ColumnDef.java     |  14 +-
 .../org/apache/phoenix/parse/ColumnName.java    |   2 +-
 .../apache/phoenix/parse/ColumnParseNode.java   |   1 +
 .../apache/phoenix/parse/CompoundParseNode.java |  32 +-
 .../phoenix/parse/CreateIndexStatement.java     |  10 +-
 .../apache/phoenix/parse/ExistsParseNode.java   |  22 +
 .../phoenix/parse/FamilyWildcardParseNode.java  |  22 +
 .../apache/phoenix/parse/FunctionParseNode.java |  31 +
 .../apache/phoenix/parse/InListParseNode.java   |  22 +
 .../org/apache/phoenix/parse/InParseNode.java   |  25 +
 .../parse/IndexExpressionParseNodeRewriter.java | 104 +++
 .../phoenix/parse/IndexKeyConstraint.java       |  12 +-
 .../apache/phoenix/parse/IsNullParseNode.java   |  22 +
 .../org/apache/phoenix/parse/LikeParseNode.java |  26 +
 .../apache/phoenix/parse/LiteralParseNode.java  |  21 +
 .../org/apache/phoenix/parse/NamedNode.java     |   2 +-
 .../apache/phoenix/parse/NamedParseNode.java    |  30 +
 .../apache/phoenix/parse/ParseNodeFactory.java  |  34 +-
 .../phoenix/parse/SequenceValueParseNode.java   |  29 +
 .../apache/phoenix/parse/SubqueryParseNode.java |  28 +
 .../org/apache/phoenix/parse/TableName.java     |   3 +-
 .../phoenix/parse/TableWildcardParseNode.java   |  29 +
 .../apache/phoenix/parse/WildcardParseNode.java |  24 +-
 .../apache/phoenix/schema/DelegateColumn.java   |   6 +
 .../apache/phoenix/schema/DelegateTable.java    |   9 +-
 .../apache/phoenix/schema/MetaDataClient.java   | 173 ++--
 .../java/org/apache/phoenix/schema/PColumn.java |   3 +
 .../org/apache/phoenix/schema/PColumnImpl.java  |  26 +-
 .../apache/phoenix/schema/PMetaDataImpl.java    |   2 +-
 .../java/org/apache/phoenix/schema/PTable.java  |   7 +-
 .../org/apache/phoenix/schema/PTableImpl.java   |  27 +-
 .../org/apache/phoenix/schema/SaltingUtil.java  |   2 +-
 .../phoenix/schema/tuple/ValueGetterTuple.java  |  93 ++
 .../java/org/apache/phoenix/util/IndexUtil.java |  15 +-
 .../expression/ColumnExpressionTest.java        |   8 +-
 .../phoenix/index/IndexMaintainerTest.java      |  14 +-
 .../iterate/AggregateResultScannerTest.java     |   5 +
 .../java/org/apache/phoenix/query/BaseTest.java |  26 +-
 phoenix-protocol/src/main/PTable.proto          |   1 +
 76 files changed, 2475 insertions(+), 418 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
index 781bfea..76eab22 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
@@ -289,7 +289,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ITEM_TABLE_DISPLAY_NAME + "\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +
@@ -380,7 +380,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ITEM_TABLE_DISPLAY_NAME + "\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +
@@ -655,7 +655,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_SCHEMA + ".idx_item\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +
@@ -747,7 +747,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_SCHEMA + ".idx_item\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +
@@ -1039,7 +1039,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY RANGE SCAN OVER " + MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX +""+ JOIN_ITEM_TABLE_DISPLAY_NAME +" [-32768]\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +
@@ -1134,7 +1134,7 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
                 "CLIENT MERGE SORT\n" +
                 "    PARALLEL INNER-JOIN TABLE 0\n" +
                 "        CLIENT PARALLEL 1-WAY FULL SCAN OVER " + JOIN_ORDER_TABLE_DISPLAY_NAME + "\n" +
-                "            SERVER FILTER BY order_id != '000000000000003'\n" +
+                "            SERVER FILTER BY \"order_id\" != '000000000000003'\n" +
                 "            PARALLEL INNER-JOIN TABLE 0\n" +
                 "                CLIENT PARALLEL 1-WAY RANGE SCAN OVER " +  MetaDataUtil.LOCAL_INDEX_TABLE_PREFIX +""+JOIN_ITEM_TABLE_DISPLAY_NAME + " [-32768]\n" +
                 "                    SERVER FILTER BY NAME != 'T3'\n" +

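The only change to the expected plans above is that the case-sensitive order_id column (presumably declared in double quotes in the test schema, given its lower-case rendering) now appears quoted in the SERVER FILTER BY line, while upper-cased names such as NAME are untouched. A hedged illustration with hypothetical names:

    -- a column declared in double quotes keeps its exact case and must be quoted when referenced
    CREATE TABLE orders_example ("order_id" VARCHAR NOT NULL PRIMARY KEY, name VARCHAR);
    -- the plan for this query renders the filter as: SERVER FILTER BY "order_id" != '000000000000003'
    EXPLAIN SELECT * FROM orders_example WHERE "order_id" != '000000000000003';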
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PercentileIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PercentileIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PercentileIT.java
index 685daeb..8109694 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PercentileIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PercentileIT.java
@@ -50,6 +50,7 @@ import java.sql.Types;
 import java.util.Properties;
 
 import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.junit.Test;
@@ -516,15 +517,17 @@ public class PercentileIT extends BaseClientManagedTimeIT {
     private static void populateINDEX_DATA_TABLETable() throws SQLException {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);
+        Date date = DateUtil.parseDate("2015-01-01 00:00:00");
         try {
             String upsert = "UPSERT INTO " + INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + INDEX_DATA_TABLE
-                    + " VALUES(?, ?, ?, ?, ?)";
+                    + " VALUES(?, ?, ?, ?, ?, ?)";
             PreparedStatement stmt = conn.prepareStatement(upsert);
             stmt.setString(1, "varchar1");
             stmt.setString(2, "char1");
             stmt.setInt(3, 1);
             stmt.setLong(4, 1L);
             stmt.setBigDecimal(5, new BigDecimal(1.0));
+            stmt.setDate(6, date);
             stmt.executeUpdate();
             
             stmt.setString(1, "varchar2");
@@ -532,6 +535,7 @@ public class PercentileIT extends BaseClientManagedTimeIT {
             stmt.setInt(3, 2);
             stmt.setLong(4, 2L);
             stmt.setBigDecimal(5, new BigDecimal(2.0));
+            stmt.setDate(6, date);
             stmt.executeUpdate();
             
             stmt.setString(1, "varchar3");
@@ -539,6 +543,7 @@ public class PercentileIT extends BaseClientManagedTimeIT {
             stmt.setInt(3, 3);
             stmt.setLong(4, 3L);
             stmt.setBigDecimal(5, new BigDecimal(3.0));
+            stmt.setDate(6, date);
             stmt.executeUpdate();
             
             conn.commit();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index aa26f9b..9a89531 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -424,6 +424,6 @@ public class ViewIT extends BaseViewIT {
         String queryPlan = QueryUtil.getExplainPlan(rs);
         assertEquals(
                 "CLIENT PARALLEL 1-WAY SKIP SCAN ON 4 KEYS OVER I1 [1,100] - [2,109]\n" + 
-                "    SERVER FILTER BY (S2 = 'bas' AND S1 = 'foo')", queryPlan);
+                "    SERVER FILTER BY (S2 = 'bas' AND \"S1\" = 'foo')", queryPlan);
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexIT.java
index 55b38a5..9eb9a57 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexIT.java
@@ -27,6 +27,7 @@ import static org.junit.Assert.fail;
 
 import java.math.BigDecimal;
 import java.sql.Connection;
+import java.sql.Date;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
@@ -38,6 +39,7 @@ import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.PTableKey;
+import org.apache.phoenix.util.DateUtil;
 import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
 import org.junit.Test;
@@ -50,23 +52,27 @@ public class ImmutableIndexIT extends BaseHBaseManagedTimeIT {
         Connection conn = DriverManager.getConnection(getUrl(), props);
         try {
             String upsert = "UPSERT INTO " + INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + INDEX_DATA_TABLE
-                    + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+                    + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
             PreparedStatement stmt = conn.prepareStatement(upsert);
             stmt.setString(1, "varchar1");
             stmt.setString(2, "char1");
             stmt.setInt(3, 1);
             stmt.setLong(4, 1L);
             stmt.setBigDecimal(5, new BigDecimal(1.0));
-            stmt.setString(6, "varchar_a");
-            stmt.setString(7, "chara");
-            stmt.setInt(8, 2);
-            stmt.setLong(9, 2L);
-            stmt.setBigDecimal(10, new BigDecimal(2.0));
-            stmt.setString(11, "varchar_b");
-            stmt.setString(12, "charb");
-            stmt.setInt(13, 3);
-            stmt.setLong(14, 3L);
-            stmt.setBigDecimal(15, new BigDecimal(3.0));
+            Date date = DateUtil.parseDate("2015-01-01 00:00:00");
+            stmt.setDate(6, date);
+            stmt.setString(7, "varchar_a");
+            stmt.setString(8, "chara");
+            stmt.setInt(9, 2);
+            stmt.setLong(10, 2L);
+            stmt.setBigDecimal(11, new BigDecimal(2.0));
+            stmt.setDate(12, date);
+            stmt.setString(13, "varchar_b");
+            stmt.setString(14, "charb");
+            stmt.setInt(15, 3);
+            stmt.setLong(16, 3L);
+            stmt.setBigDecimal(17, new BigDecimal(3.0));
+            stmt.setDate(18, date);
             stmt.executeUpdate();
             
             stmt.setString(1, "varchar2");
@@ -74,16 +80,20 @@ public class ImmutableIndexIT extends BaseHBaseManagedTimeIT {
             stmt.setInt(3, 2);
             stmt.setLong(4, 2L);
             stmt.setBigDecimal(5, new BigDecimal(2.0));
-            stmt.setString(6, "varchar_a");
-            stmt.setString(7, "chara");
-            stmt.setInt(8, 3);
-            stmt.setLong(9, 3L);
-            stmt.setBigDecimal(10, new BigDecimal(3.0));
-            stmt.setString(11, "varchar_b");
-            stmt.setString(12, "charb");
-            stmt.setInt(13, 4);
-            stmt.setLong(14, 4L);
-            stmt.setBigDecimal(15, new BigDecimal(4.0));
+            date = DateUtil.parseDate("2015-01-02 00:00:00");
+            stmt.setDate(6, date);
+            stmt.setString(7, "varchar_a");
+            stmt.setString(8, "chara");
+            stmt.setInt(9, 3);
+            stmt.setLong(10, 3L);
+            stmt.setBigDecimal(11, new BigDecimal(3.0));
+            stmt.setDate(12, date);
+            stmt.setString(13, "varchar_b");
+            stmt.setString(14, "charb");
+            stmt.setInt(15, 4);
+            stmt.setLong(16, 4L);
+            stmt.setBigDecimal(17, new BigDecimal(4.0));
+            stmt.setDate(18, date);
             stmt.executeUpdate();
             
             stmt.setString(1, "varchar3");
@@ -91,16 +101,20 @@ public class ImmutableIndexIT extends BaseHBaseManagedTimeIT {
             stmt.setInt(3, 3);
             stmt.setLong(4, 3L);
             stmt.setBigDecimal(5, new BigDecimal(3.0));
-            stmt.setString(6, "varchar_a");
-            stmt.setString(7, "chara");
-            stmt.setInt(8, 4);
-            stmt.setLong(9, 4L);
-            stmt.setBigDecimal(10, new BigDecimal(4.0));
-            stmt.setString(11, "varchar_b");
-            stmt.setString(12, "charb");
-            stmt.setInt(13, 5);
-            stmt.setLong(14, 5L);
-            stmt.setBigDecimal(15, new BigDecimal(5.0));
+            date = DateUtil.parseDate("2015-01-03 00:00:00");
+            stmt.setDate(6, date);
+            stmt.setString(7, "varchar_a");
+            stmt.setString(8, "chara");
+            stmt.setInt(9, 4);
+            stmt.setLong(10, 4L);
+            stmt.setBigDecimal(11, new BigDecimal(4.0));
+            stmt.setDate(12, date);
+            stmt.setString(13, "varchar_b");
+            stmt.setString(14, "charb");
+            stmt.setInt(15, 5);
+            stmt.setLong(16, 5L);
+            stmt.setBigDecimal(17, new BigDecimal(5.0));
+            stmt.setDate(18, date);
             stmt.executeUpdate();
             
             conn.commit();
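The renumbered binds here, like the extra bind in PercentileIT above, follow new DATE columns added to the shared INDEX_TEST test tables (one in the primary key and one per column family, bound at positions 6, 12 and 18), presumably via the BaseTest change listed in the diffstat. A hedged sketch of the widened row as a literal UPSERT; the table name mirrors the TestUtil constant and is assumed:

    -- hypothetical literal form of the 18-column upsert; TO_DATE stands in for the stmt.setDate binds
    UPSERT INTO INDEX_TEST.INDEX_DATA_TABLE VALUES
        ('varchar1', 'char1', 1, 1, 1.0, TO_DATE('2015-01-01 00:00:00'),
         'varchar_a', 'chara', 2, 2, 2.0, TO_DATE('2015-01-01 00:00:00'),
         'varchar_b', 'charb', 3, 3, 3.0, TO_DATE('2015-01-01 00:00:00'));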

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
new file mode 100644
index 0000000..28124b6
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexExpressionIT.java
@@ -0,0 +1,866 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
+ * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+ * applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+package org.apache.phoenix.end2end.index;
+
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_SCHEMA;
+import static org.apache.phoenix.util.TestUtil.INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.MUTABLE_INDEX_DATA_TABLE;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.math.BigDecimal;
+import java.sql.Connection;
+import java.sql.Date;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Properties;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.util.DateUtil;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
+import org.junit.Test;
+
+public class IndexExpressionIT extends BaseHBaseManagedTimeIT {
+
+    private static final int NUM_MILLIS_IN_DAY = 86400000;
+
+    @Test
+    public void testImmutableIndexCreationAndUpdate() throws Exception {
+        helpTestCreateAndUpdate(false, false);
+    }
+
+    @Test
+    public void testImmutableLocalIndexCreationAndUpdate() throws Exception {
+        helpTestCreateAndUpdate(false, true);
+    }
+
+    @Test
+    public void testMutableIndexCreationAndUpdate() throws Exception {
+        helpTestCreateAndUpdate(true, false);
+    }
+
+    @Test
+    public void testMutableLocalIndexCreationAndUpdate() throws Exception {
+        helpTestCreateAndUpdate(true, true);
+    }
+
+    /**
+     * Adds a row to the index data table
+     * 
+     * @param i
+     *            row number
+     */
+    private void insertRow(PreparedStatement stmt, int i) throws SQLException {
+        // insert row
+        stmt.setString(1, "varchar" + String.valueOf(i));
+        stmt.setString(2, "char" + String.valueOf(i));
+        stmt.setInt(3, i);
+        stmt.setLong(4, i);
+        stmt.setBigDecimal(5, new BigDecimal(Double.valueOf(i)));
+        Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i - 1) * NUM_MILLIS_IN_DAY);
+        stmt.setDate(6, date);
+        stmt.setString(7, "a.varchar" + String.valueOf(i));
+        stmt.setString(8, "a.char" + String.valueOf(i));
+        stmt.setInt(9, i);
+        stmt.setLong(10, i);
+        stmt.setBigDecimal(11, new BigDecimal((double)i));
+        stmt.setDate(12, date);
+        stmt.setString(13, "b.varchar" + String.valueOf(i));
+        stmt.setString(14, "b.char" + String.valueOf(i));
+        stmt.setInt(15, i);
+        stmt.setLong(16, i);
+        stmt.setBigDecimal(17, new BigDecimal((double)i));
+        stmt.setDate(18, date);
+        stmt.executeUpdate();
+    }
+
+    private void verifyResult(ResultSet rs, int i) throws SQLException {
+        assertTrue(rs.next());
+        assertEquals("VARCHAR" + String.valueOf(i) + "_" + StringUtils.rightPad("CHAR" + String.valueOf(i), 6, ' ')
+                + "_A.VARCHAR" + String.valueOf(i) + "_" + StringUtils.rightPad("B.CHAR" + String.valueOf(i), 10, ' '),
+                rs.getString(1));
+        assertEquals(i * 4, rs.getInt(2));
+        Date date = new Date(DateUtil.parseDate("2015-01-01 00:00:00").getTime() + (i) * NUM_MILLIS_IN_DAY);
+        assertEquals(date, rs.getDate(3));
+        assertEquals(date, rs.getDate(4));
+        assertEquals(date, rs.getDate(5));
+        assertEquals("varchar" + String.valueOf(i), rs.getString(6));
+        assertEquals("char" + String.valueOf(i), rs.getString(7));
+        assertEquals(i, rs.getInt(8));
+        assertEquals(i, rs.getLong(9));
+        assertEquals(i, rs.getDouble(10), 0.000001);
+        assertEquals(i, rs.getLong(11));
+        assertEquals(i, rs.getLong(12));
+    }
+
+    protected void helpTestCreateAndUpdate(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+
+            // create an expression index
+            String ddl = "CREATE "
+                    + (localIndex ? "LOCAL" : "")
+                    + " INDEX IDX ON "
+                    + fullDataTableName
+                    + " ((UPPER(varchar_pk) || '_' || UPPER(char_pk) || '_' || UPPER(varchar_col1) || '_' || UPPER(char_col2)),"
+                    + " (decimal_pk+int_pk+decimal_col2+int_col1)," + " date_pk+1, date1+1, date2+1 )"
+                    + " INCLUDE (long_col1, long_col2)";
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+
+            // run select query with expression in WHERE clause
+            String whereSql = "SELECT long_col1, long_col2 from "
+                    + fullDataTableName
+                    + " WHERE UPPER(varchar_pk) || '_' || UPPER(char_pk) || '_' || UPPER(varchar_col1) || '_' || UPPER(char_col2) = ?"
+                    + " AND decimal_pk+int_pk+decimal_col2+int_col1=?"
+                    // since a.date1 and b.date2 are NULLABLE and date is fixed width, these expressions are stored as
+                    // DECIMAL in the index (which is not fixed width)
+                    + " AND date_pk+1=? AND date1+1=? AND date2+1=?";
+            stmt = conn.prepareStatement(whereSql);
+            stmt.setString(1, "VARCHAR1_CHAR1 _A.VARCHAR1_B.CHAR1   ");
+            stmt.setInt(2, 4);
+            Date date = DateUtil.parseDate("2015-01-02 00:00:00");
+            stmt.setDate(3, date);
+            stmt.setDate(4, date);
+            stmt.setDate(5, date);
+
+            // verify that the query does a range scan on the index table
+            ResultSet rs = stmt.executeQuery("EXPLAIN " + whereSql);
+            assertEquals(
+                    localIndex ? "CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_INDEX_TEST."
+                            + dataTableName
+                            + " [-32768,'VARCHAR1_CHAR1 _A.VARCHAR1_B.CHAR1   ',4,'2015-01-02 00:00:00.000',1,420,156,800,000,1,420,156,800,000]\nCLIENT MERGE SORT"
+                            : "CLIENT PARALLEL 1-WAY RANGE SCAN OVER INDEX_TEST.IDX ['VARCHAR1_CHAR1 _A.VARCHAR1_B.CHAR1   ',4,'2015-01-02 00:00:00.000',1,420,156,800,000,1,420,156,800,000]",
+                    QueryUtil.getExplainPlan(rs));
+
+            // verify that the correct results are returned
+            rs = stmt.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(1));
+            assertEquals(1, rs.getInt(2));
+            assertFalse(rs.next());
+
+            // verify all rows in data table are present in index table
+            String indexSelectSql = "SELECT UPPER(varchar_pk) || '_' || UPPER(char_pk) || '_' || UPPER(varchar_col1) || '_' || UPPER(char_col2), "
+                    + "decimal_pk+int_pk+decimal_col2+int_col1, "
+                    + "date_pk+1, date1+1, date2+1, "
+                    + "varchar_pk, char_pk, int_pk, long_pk, decimal_pk, "
+                    + "long_col1, long_col2 "
+                    + "from "
+                    + fullDataTableName;
+            rs = conn.createStatement().executeQuery("EXPLAIN " + indexSelectSql);
+            assertEquals(localIndex ? "CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_" + fullDataTableName
+                    + " [-32768]\nCLIENT MERGE SORT" : "CLIENT PARALLEL 1-WAY FULL SCAN OVER INDEX_TEST.IDX",
+                    QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(indexSelectSql);
+            verifyResult(rs, 1);
+            verifyResult(rs, 2);
+
+            // Insert two more rows to the index data table
+            String upsert = "UPSERT INTO " + fullDataTableName
+                    + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+            stmt = conn.prepareStatement(upsert);
+            insertRow(stmt, 3);
+            insertRow(stmt, 4);
+            conn.commit();
+
+            rs = conn.createStatement().executeQuery(indexSelectSql);
+            verifyResult(rs, 1);
+            verifyResult(rs, 2);
+            // verify that two rows added after index was created were also added to
+            // the index table
+            verifyResult(rs, 3);
+            verifyResult(rs, 4);
+
+            // update the first row
+            upsert = "UPSERT INTO "
+                    + fullDataTableName
+                    + "(varchar_pk, char_pk, int_pk, long_pk, decimal_pk, date_pk, a.varchar_col1) VALUES(?, ?, ?, ?, ?, ?, ?)";
+
+            stmt = conn.prepareStatement(upsert);
+            stmt.setString(1, "varchar1");
+            stmt.setString(2, "char1");
+            stmt.setInt(3, 1);
+            stmt.setLong(4, 1l);
+            stmt.setBigDecimal(5, new BigDecimal(1.0));
+            stmt.setDate(6, DateUtil.parseDate("2015-01-01 00:00:00"));
+            stmt.setString(7, "a.varchar_updated");
+            stmt.executeUpdate();
+            conn.commit();
+
+            // verify only one row was updated in the data table
+            String selectSql = "UPPER(varchar_pk) || '_' || UPPER(char_pk) || '_' || UPPER(varchar_col1) || '_' || UPPER(char_col2) from "
+                    + fullDataTableName;
+            rs = conn.createStatement().executeQuery("SELECT /*+ NO_INDEX */ " + selectSql);
+            assertTrue(rs.next());
+            assertEquals("VARCHAR1_CHAR1 _A.VARCHAR_UPDATED_B.CHAR1   ", rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR2_CHAR2 _A.VARCHAR2_B.CHAR2   ", rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR3_CHAR3 _A.VARCHAR3_B.CHAR3   ", rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR4_CHAR4 _A.VARCHAR4_B.CHAR4   ", rs.getString(1));
+            assertFalse(rs.next());
+
+            // verify that the rows in the index table are also updated
+            rs = conn.createStatement().executeQuery("SELECT " + selectSql);
+            assertTrue(rs.next());
+            // if the data table is immutable, the index table will have one more
+            // row
+            if (!mutable) {
+                assertEquals("VARCHAR1_CHAR1 _A.VARCHAR1_B.CHAR1   ", rs.getString(1));
+                assertTrue(rs.next());
+            }
+            assertEquals("VARCHAR1_CHAR1 _A.VARCHAR_UPDATED_" + (mutable ? "B.CHAR1   " : ""), rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR2_CHAR2 _A.VARCHAR2_B.CHAR2   ", rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR3_CHAR3 _A.VARCHAR3_B.CHAR3   ", rs.getString(1));
+            assertTrue(rs.next());
+            assertEquals("VARCHAR4_CHAR4 _A.VARCHAR4_B.CHAR4   ", rs.getString(1));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    private void populateDataTable(Connection conn, String dataTable) throws SQLException {
+        ensureTableCreated(getUrl(), dataTable);
+        String upsert = "UPSERT INTO " + INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTable
+                + " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+        PreparedStatement stmt1 = conn.prepareStatement(upsert);
+        // insert two rows
+        insertRow(stmt1, 1);
+        insertRow(stmt1, 2);
+        conn.commit();
+    }
+
+    @Test
+    public void testDeleteIndexedExpressionImmutableIndex() throws Exception {
+        helpTestDeleteIndexedExpression(false, false);
+    }
+
+    @Test
+    public void testDeleteIndexedExpressionImmutableLocalIndex() throws Exception {
+        helpTestDeleteIndexedExpression(false, true);
+    }
+
+    @Test
+    public void testDeleteIndexedExpressionMutableIndex() throws Exception {
+        helpTestDeleteIndexedExpression(true, false);
+    }
+
+    @Test
+    public void testDeleteIndexedExpressionMutableLocalIndex() throws Exception {
+        helpTestDeleteIndexedExpression(true, true);
+    }
+
+    protected void helpTestDeleteIndexedExpression(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        String fullIndexTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + "IDX";
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            ensureTableCreated(getUrl(), dataTableName);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (2*long_col2)";
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+
+            ResultSet rs;
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullDataTableName);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullIndexTableName);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+
+            conn.setAutoCommit(true);
+            String dml = "DELETE from " + fullDataTableName + " WHERE long_col2 = 2";
+            try {
+                conn.createStatement().execute(dml);
+                if (!mutable) {
+                    fail();
+                }
+            } catch (SQLException e) {
+                if (!mutable) {
+                    assertEquals(SQLExceptionCode.INVALID_FILTER_ON_IMMUTABLE_ROWS.getErrorCode(), e.getErrorCode());
+                }
+            }
+
+            if (!mutable) {
+                dml = "DELETE from " + fullDataTableName + " WHERE 2*long_col2 = 4";
+                conn.createStatement().execute(dml);
+            }
+
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullDataTableName);
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(1));
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullIndexTableName);
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(1));
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testDeleteCoveredColImmutableIndex() throws Exception {
+        helpTestDeleteCoveredCol(false, false);
+    }
+
+    @Test
+    public void testDeleteCoveredColImmutableLocalIndex() throws Exception {
+        helpTestDeleteCoveredCol(false, true);
+    }
+
+    @Test
+    public void testDeleteCoveredColMutableIndex() throws Exception {
+        helpTestDeleteCoveredCol(true, false);
+    }
+
+    @Test
+    public void testDeleteCoveredColMutableLocalIndex() throws Exception {
+        helpTestDeleteCoveredCol(true, true);
+    }
+
+    protected void helpTestDeleteCoveredCol(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        String fullIndexTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + "IDX";
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            ensureTableCreated(getUrl(), dataTableName);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (long_pk, varchar_pk, 1+long_pk, UPPER(varchar_pk) )" + " INCLUDE (long_col1, long_col2)";
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+
+            ResultSet rs;
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullDataTableName);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            rs = conn.createStatement().executeQuery("SELECT COUNT(*) FROM " + fullIndexTableName);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+
+            String dml = "DELETE from " + fullDataTableName + " WHERE long_col2 = 2";
+            assertEquals(1, conn.createStatement().executeUpdate(dml));
+            conn.commit();
+
+            String query = "SELECT /*+ NO_INDEX */ long_pk, varchar_pk, 1+long_pk, UPPER(varchar_pk) FROM "
+                    + fullDataTableName;
+            rs = conn.createStatement().executeQuery(query);
+            assertTrue(rs.next());
+            assertEquals(1L, rs.getLong(1));
+            assertEquals("varchar1", rs.getString(2));
+            assertEquals(2L, rs.getLong(3));
+            assertEquals("VARCHAR1", rs.getString(4));
+            assertFalse(rs.next());
+
+            query = "SELECT long_pk, varchar_pk, 1+long_pk, UPPER(varchar_pk) FROM " + fullDataTableName;
+            rs = conn.createStatement().executeQuery(query);
+            assertTrue(rs.next());
+            assertEquals(1L, rs.getLong(1));
+            assertEquals("varchar1", rs.getString(2));
+            assertEquals(2L, rs.getLong(3));
+            assertEquals("VARCHAR1", rs.getString(4));
+            assertFalse(rs.next());
+
+            query = "SELECT * FROM " + fullIndexTableName;
+            rs = conn.createStatement().executeQuery(query);
+            assertTrue(rs.next());
+
+            assertEquals(1L, rs.getLong(1));
+            assertEquals("varchar1", rs.getString(2));
+            assertEquals(2L, rs.getLong(3));
+            assertEquals("VARCHAR1", rs.getString(4));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testGroupByCountImmutableIndex() throws Exception {
+        helpTestGroupByCount(false, false);
+    }
+
+    @Test
+    public void testGroupByCountImmutableLocalIndex() throws Exception {
+        helpTestGroupByCount(false, true);
+    }
+
+    @Test
+    public void testGroupByCountMutableIndex() throws Exception {
+        helpTestGroupByCount(true, false);
+    }
+
+    @Test
+    public void testGroupByCountMutableLocalIndex() throws Exception {
+        helpTestGroupByCount(true, true);
+    }
+
+    protected void helpTestGroupByCount(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (int_col1+int_col2)";
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+
+            String groupBySql = "SELECT (int_col1+int_col2), COUNT(*) FROM " + fullDataTableName
+                    + " GROUP BY (int_col1+int_col2)";
+            ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + groupBySql);
+            String expectedPlan = "CLIENT PARALLEL 1-WAY "
+                    + (localIndex ? "RANGE SCAN OVER _LOCAL_IDX_" + fullDataTableName + " [-32768]"
+                            : "FULL SCAN OVER INDEX_TEST.IDX")
+                    + "\n    SERVER FILTER BY FIRST KEY ONLY\n    SERVER AGGREGATE INTO ORDERED DISTINCT ROWS BY [TO_BIGINT((A.INT_COL1 + B.INT_COL2))]\nCLIENT MERGE SORT";
+            assertEquals(expectedPlan, QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(groupBySql);
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals(1, rs.getInt(2));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testSelectDistinctImmutableIndex() throws Exception {
+        helpTestSelectDistinct(false, false);
+    }
+
+    @Test
+    public void testSelectDistinctImmutableIndexLocal() throws Exception {
+        helpTestSelectDistinct(false, true);
+    }
+
+    @Test
+    public void testSelectDistinctMutableIndex() throws Exception {
+        helpTestSelectDistinct(true, false);
+    }
+
+    @Test
+    public void testSelectDistinctMutableLocalIndex() throws Exception {
+        helpTestSelectDistinct(true, true);
+    }
+
+    protected void helpTestSelectDistinct(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (int_col1+1)";
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+            String sql = "SELECT distinct int_col1+1 FROM " + fullDataTableName + " where int_col1+1 > 0";
+            ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + sql);
+            String expectedPlan = "CLIENT PARALLEL 1-WAY RANGE SCAN OVER "
+                    + (localIndex ? "_LOCAL_IDX_" + fullDataTableName + " [-32768,0] - [-32768,*]"
+                            : "INDEX_TEST.IDX [0] - [*]")
+                    + "\n    SERVER FILTER BY FIRST KEY ONLY\n    SERVER AGGREGATE INTO ORDERED DISTINCT ROWS BY [TO_BIGINT((A.INT_COL1 + 1))]\nCLIENT MERGE SORT";
+            assertEquals(expectedPlan, QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(sql);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(3, rs.getInt(1));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testInClauseWithImmutableIndex() throws Exception {
+        helpTestInClauseWithIndex(false, false);
+    }
+
+    @Test
+    public void testInClauseWithImmutableLocalIndex() throws Exception {
+        helpTestInClauseWithIndex(false, true);
+    }
+
+    @Test
+    public void testInClauseWithMutableIndex() throws Exception {
+        helpTestInClauseWithIndex(true, false);
+    }
+
+    @Test
+    public void testInClauseWithMutableLocalIndex() throws Exception {
+        helpTestInClauseWithIndex(true, true);
+    }
+
+    protected void helpTestInClauseWithIndex(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (int_col1+1)";
+
+            conn = DriverManager.getConnection(getUrl(), props);
+            conn.setAutoCommit(false);
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+            String sql = "SELECT int_col1+1 FROM " + fullDataTableName + " where int_col1+1 IN (2)";
+            ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + sql);
+            assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER "
+                    + (localIndex ? "_LOCAL_IDX_" + fullDataTableName + " [-32768,2]\n    SERVER FILTER BY FIRST KEY ONLY\nCLIENT MERGE SORT"
+                            : "INDEX_TEST.IDX [2]\n    SERVER FILTER BY FIRST KEY ONLY"), QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(sql);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testOrderByWithImmutableIndex() throws Exception {
+        helpTestOrderByWithIndex(false, false);
+    }
+
+    @Test
+    public void testOrderByWithImmutableLocalIndex() throws Exception {
+        helpTestOrderByWithIndex(false, true);
+    }
+
+    @Test
+    public void testOrderByWithMutableIndex() throws Exception {
+        helpTestOrderByWithIndex(true, false);
+    }
+
+    @Test
+    public void testOrderByWithMutableLocalIndex() throws Exception {
+        helpTestOrderByWithIndex(true, true);
+    }
+
+    protected void helpTestOrderByWithIndex(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (int_col1+1)";
+
+            conn = DriverManager.getConnection(getUrl(), props);
+            conn.setAutoCommit(false);
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+            String sql = "SELECT int_col1+1 FROM " + fullDataTableName + " ORDER BY int_col1+1";
+            ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + sql);
+            assertEquals("CLIENT PARALLEL 1-WAY "
+                    + (localIndex ? "RANGE SCAN OVER _LOCAL_IDX_" + fullDataTableName
+                            + " [-32768]\n    SERVER FILTER BY FIRST KEY ONLY\nCLIENT MERGE SORT"
+                            : "FULL SCAN OVER INDEX_TEST.IDX\n    SERVER FILTER BY FIRST KEY ONLY"),
+                    QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(sql);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(3, rs.getInt(1));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testSelectColOnlyInDataTableImmutableIndex() throws Exception {
+        helpTestSelectColOnlyInDataTable(false, false);
+    }
+
+    @Test
+    public void testSelectColOnlyInDataTableImmutableLocalIndex() throws Exception {
+        helpTestSelectColOnlyInDataTable(false, true);
+    }
+
+    @Test
+    public void testSelectColOnlyInDataTableMutableIndex() throws Exception {
+        helpTestSelectColOnlyInDataTable(true, false);
+    }
+
+    @Test
+    public void testSelectColOnlyInDataTableMutableLocalIndex() throws Exception {
+        helpTestSelectColOnlyInDataTable(true, true);
+    }
+
+    protected void helpTestSelectColOnlyInDataTable(boolean mutable, boolean localIndex) throws Exception {
+        String dataTableName = mutable ? MUTABLE_INDEX_DATA_TABLE : INDEX_DATA_TABLE;
+        String fullDataTableName = INDEX_DATA_SCHEMA + QueryConstants.NAME_SEPARATOR + dataTableName;
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.setAutoCommit(false);
+            populateDataTable(conn, dataTableName);
+            String ddl = "CREATE " + (localIndex ? "LOCAL" : "") + " INDEX IDX ON " + fullDataTableName
+                    + " (int_col1+1)";
+
+            conn = DriverManager.getConnection(getUrl(), props);
+            conn.setAutoCommit(false);
+            PreparedStatement stmt = conn.prepareStatement(ddl);
+            stmt.execute();
+            String sql = "SELECT int_col1+1, int_col2 FROM " + fullDataTableName + " WHERE int_col1+1=2";
+            ResultSet rs = conn.createStatement().executeQuery("EXPLAIN " + sql);
+            assertEquals("CLIENT PARALLEL 1-WAY "
+                    + (localIndex ? "RANGE SCAN OVER _LOCAL_IDX_" + fullDataTableName
+                            + " [-32768,2]\n    SERVER FILTER BY FIRST KEY ONLY\nCLIENT MERGE SORT" : "FULL SCAN OVER "
+                            + fullDataTableName + "\n    SERVER FILTER BY (A.INT_COL1 + 1) = 2"),
+                    QueryUtil.getExplainPlan(rs));
+            rs = conn.createStatement().executeQuery(sql);
+            assertTrue(rs.next());
+            assertEquals(2, rs.getInt(1));
+            assertEquals(1, rs.getInt(2));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX IDX ON " + fullDataTableName);
+        } finally {
+            conn.close();
+        }
+    }
+    
+    @Test
+    public void testImmutableIndexWithCaseSensitiveCols() throws Exception {
+        helpTestIndexWithCaseSensitiveCols(false, false);
+    }
+    
+    @Test
+    public void testImmutableLocalIndexWithCaseSensitiveCols() throws Exception {
+        helpTestIndexWithCaseSensitiveCols(false, true);
+    }
+    
+    @Test
+    public void testMutableIndexWithCaseSensitiveCols() throws Exception {
+        helpTestIndexWithCaseSensitiveCols(true, false);
+    }
+    
+    @Test
+    public void testMutableLocalIndexWithCaseSensitiveCols() throws Exception {
+        helpTestIndexWithCaseSensitiveCols(true, true);
+    }
+    
+    protected void helpTestIndexWithCaseSensitiveCols(boolean mutable, boolean localIndex) throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        try {
+            conn.createStatement().execute("CREATE TABLE cs (k VARCHAR NOT NULL PRIMARY KEY, \"V1\" VARCHAR, \"v2\" VARCHAR) "+ (mutable ? "" : "IMMUTABLE_ROWS=true"));
+            String query = "SELECT * FROM cs";
+            ResultSet rs = conn.createStatement().executeQuery(query);
+            assertFalse(rs.next());
+            if (localIndex) {
+                conn.createStatement().execute("CREATE LOCAL INDEX ics ON cs (\"V1\" || '_' || \"v2\") INCLUDE (\"V1\",\"v2\")");
+            } else {
+                conn.createStatement().execute("CREATE INDEX ics ON cs (\"V1\" || '_' || \"v2\") INCLUDE (\"V1\",\"v2\")");
+            }
+            query = "SELECT * FROM ics";
+            rs = conn.createStatement().executeQuery(query);
+            assertFalse(rs.next());
+
+            PreparedStatement stmt = conn.prepareStatement("UPSERT INTO cs VALUES(?,?,?)");
+            stmt.setString(1,"a");
+            stmt.setString(2, "x");
+            stmt.setString(3, "1");
+            stmt.execute();
+            stmt.setString(1,"b");
+            stmt.setString(2, "y");
+            stmt.setString(3, "2");
+            stmt.execute();
+            conn.commit();
+
+            //TODO FIX THIS change this to *
+            query = "SELECT (\"V1\" || '_' || \"v2\"), k, \"V1\", \"v2\"  FROM cs WHERE (\"V1\" || '_' || \"v2\") = 'x_1'";
+            rs = conn.createStatement().executeQuery("EXPLAIN " + query);
+            if(localIndex){
+                assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_CS [-32768,'x_1']\n"
+                           + "CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs));
+            } else {
+                assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER ICS ['x_1']", QueryUtil.getExplainPlan(rs));
+            }
+
+            rs = conn.createStatement().executeQuery(query);
+            assertTrue(rs.next());
+            assertEquals("x_1",rs.getString(1));
+            assertEquals("a",rs.getString(2));
+            assertEquals("x",rs.getString(3));
+            assertEquals("1",rs.getString(4));
+            //TODO figure out why this " " is needed
+            assertEquals("x_1",rs.getString("\"('V1' || '_' || 'v2')\""));
+            assertEquals("a",rs.getString("k"));
+            assertEquals("x",rs.getString("V1"));
+            assertEquals("1",rs.getString("v2"));
+            assertFalse(rs.next());
+
+            query = "SELECT \"V1\", \"V1\" as foo1, (\"V1\" || '_' || \"v2\") as foo, (\"V1\" || '_' || \"v2\") as \"Foo1\", (\"V1\" || '_' || \"v2\") FROM cs ORDER BY foo";
+            rs = conn.createStatement().executeQuery("EXPLAIN " + query);
+            if(localIndex){
+                assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_CS [-32768]\nCLIENT MERGE SORT",
+                    QueryUtil.getExplainPlan(rs));
+            } else {
+                assertEquals("CLIENT PARALLEL 1-WAY FULL SCAN OVER ICS", QueryUtil.getExplainPlan(rs));
+            }
+
+            rs = conn.createStatement().executeQuery(query);
+            assertTrue(rs.next());
+            assertEquals("x",rs.getString(1));
+            assertEquals("x",rs.getString("V1"));
+            assertEquals("x",rs.getString(2));
+            assertEquals("x",rs.getString("foo1"));
+            assertEquals("x_1",rs.getString(3));
+            assertEquals("x_1",rs.getString("Foo"));
+            assertEquals("x_1",rs.getString(4));
+            assertEquals("x_1",rs.getString("Foo1"));
+            assertEquals("x_1",rs.getString(5));
+            assertEquals("x_1",rs.getString("\"('V1' || '_' || 'v2')\""));
+            assertTrue(rs.next());
+            assertEquals("y",rs.getString(1));
+            assertEquals("y",rs.getString("V1"));
+            assertEquals("y",rs.getString(2));
+            assertEquals("y",rs.getString("foo1"));
+            assertEquals("y_2",rs.getString(3));
+            assertEquals("y_2",rs.getString("Foo"));
+            assertEquals("y_2",rs.getString(4));
+            assertEquals("y_2",rs.getString("Foo1"));
+            assertEquals("y_2",rs.getString(5));
+            assertEquals("y_2",rs.getString("\"('V1' || '_' || 'v2')\""));
+            assertFalse(rs.next());
+            conn.createStatement().execute("DROP INDEX ICS ON CS");
+        } finally {
+            conn.close();
+        }
+    }
+    
+    @Test
+    public void testImmutableIndexDropIndexedColumn() throws Exception {
+        helpTestDropIndexedColumn(false, false);
+    }
+    
+    @Test
+    public void testImmutableLocalIndexDropIndexedColumn() throws Exception {
+        helpTestDropIndexedColumn(false, true);
+    }
+    
+    @Test
+    public void testMutableIndexDropIndexedColumn() throws Exception {
+        helpTestDropIndexedColumn(true, false);
+    }
+    
+    @Test
+    public void testMutableLocalIndexDropIndexedColumn() throws Exception {
+        helpTestDropIndexedColumn(true, true);
+    }
+    
+    public void helpTestDropIndexedColumn(boolean mutable, boolean local) throws Exception {
+        String query;
+        ResultSet rs;
+        PreparedStatement stmt;
+
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        // make sure that the tables are empty, but reachable
+        conn.createStatement().execute(
+          "CREATE TABLE t (k VARCHAR NOT NULL PRIMARY KEY, v1 VARCHAR, v2 VARCHAR)");
+        query = "SELECT * FROM t" ;
+        rs = conn.createStatement().executeQuery(query);
+        assertFalse(rs.next());
+        String indexName = "it_" + (mutable ? "m" : "im") + "_" + (local ? "l" : "h");
+        conn.createStatement().execute("CREATE " + ( local ? "LOCAL" : "") + " INDEX " + indexName + " ON t (v1 || '_' || v2)");
+
+        query = "SELECT * FROM t";
+        rs = conn.createStatement().executeQuery(query);
+        assertFalse(rs.next());
+
+        // load some data into the table
+        stmt = conn.prepareStatement("UPSERT INTO t VALUES(?,?,?)");
+        stmt.setString(1, "a");
+        stmt.setString(2, "x");
+        stmt.setString(3, "1");
+        stmt.execute();
+        conn.commit();
+
+        assertIndexExists(conn,true);
+        conn.createStatement().execute("ALTER TABLE t DROP COLUMN v1");
+        assertIndexExists(conn,false);
+
+        query = "SELECT * FROM t";
+        rs = conn.createStatement().executeQuery(query);
+        assertTrue(rs.next());
+        assertEquals("a",rs.getString(1));
+        assertEquals("1",rs.getString(2));
+        assertFalse(rs.next());
+
+        // load some data into the table
+        stmt = conn.prepareStatement("UPSERT INTO t VALUES(?,?)");
+        stmt.setString(1, "a");
+        stmt.setString(2, "2");
+        stmt.execute();
+        conn.commit();
+
+        query = "SELECT * FROM t";
+        rs = conn.createStatement().executeQuery(query);
+        assertTrue(rs.next());
+        assertEquals("a",rs.getString(1));
+        assertEquals("2",rs.getString(2));
+        assertFalse(rs.next());
+    }
+    
+    private static void assertIndexExists(Connection conn, boolean exists) throws SQLException {
+        ResultSet rs = conn.getMetaData().getIndexInfo(null, null, "T", false, false);
+        assertEquals(exists, rs.next());
+    }
+
+}
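In summary, these tests assert two plan shapes: a global expression index is range-scanned directly, while a LOCAL expression index is range-scanned through the shared _LOCAL_IDX_ table with the [-32768] prefix and a client-side merge sort. A condensed sketch assuming an expression index on int_col1 + 1, as in helpTestInClauseWithIndex; object names are illustrative:

    CREATE INDEX idx_expr ON index_test.index_data_table (int_col1 + 1);
    EXPLAIN SELECT int_col1 + 1 FROM index_test.index_data_table WHERE int_col1 + 1 IN (2);
    -- global:  CLIENT PARALLEL 1-WAY RANGE SCAN OVER INDEX_TEST.IDX_EXPR [2]
    --              SERVER FILTER BY FIRST KEY ONLY
    -- local (with CREATE LOCAL INDEX instead):
    --          CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_INDEX_TEST.INDEX_DATA_TABLE [-32768,2]
    --              SERVER FILTER BY FIRST KEY ONLY
    --          CLIENT MERGE SORT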

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexMetadataIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexMetadataIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexMetadataIT.java
index 88e7340..d6ced3c 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexMetadataIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/IndexMetadataIT.java
@@ -100,7 +100,7 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         conn.createStatement().executeQuery("SELECT count(*) FROM " + fullTableName).next(); // client side cache will update
         PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
-        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr);
+        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr, pconn);
         assertTrue(ptr.getLength() > 0);
     }
     
@@ -109,7 +109,7 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
         String fullTableName = SchemaUtil.getTableName(schemaName, tableName);
         conn.createStatement().executeQuery("SELECT count(*) FROM " + fullTableName).next(); // client side cache will update
         PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
-        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr);
+        pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), fullTableName)).getIndexMaintainers(ptr, pconn);
         assertTrue(ptr.getLength() == 0);
     }
     
@@ -135,8 +135,9 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 5, ":CHAR_PK", Order.ASC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 6, ":LONG_PK", Order.DESC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 7, ":DECIMAL_PK", Order.ASC);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 8, "A:INT_COL1", null);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 9, "B:INT_COL2", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 8, ":DATE_PK", Order.ASC);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 9, "A:INT_COL1", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX", 10, "B:INT_COL2", null);
             assertFalse(rs.next());
             
             rs = conn.getMetaData().getTables(null, StringUtil.escapeLike(INDEX_DATA_SCHEMA), StringUtil.escapeLike("IDX"), new String[] {PTableType.INDEX.getValue().getString() });
@@ -245,8 +246,9 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 5, ":CHAR_PK", Order.ASC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 6, ":LONG_PK", Order.DESC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 7, ":DECIMAL_PK", Order.ASC);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 8, "A:INT_COL1", null);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 9, "B:INT_COL2", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 8, ":DATE_PK", Order.ASC);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 9, "A:INT_COL1", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX1", 10, "B:INT_COL2", null);
 
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 1, "A:VARCHAR_COL1", Order.ASC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 2, "B:VARCHAR_COL2", Order.ASC);
@@ -255,7 +257,8 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 5, ":CHAR_PK", Order.ASC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 6, ":LONG_PK", Order.DESC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 7, ":DECIMAL_PK", Order.ASC);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 8, "B:INT_COL2", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 8, ":DATE_PK", Order.ASC);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, MUTABLE_INDEX_DATA_TABLE, "IDX2", 9, "B:INT_COL2", null);
             assertFalse(rs.next());
             
             // Create another table in the same schema
@@ -307,7 +310,8 @@ public class IndexMetadataIT extends BaseHBaseManagedTimeIT {
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 6, ":INT_PK", Order.ASC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 7, ":LONG_PK", Order.DESC);
             assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 8, ":DECIMAL_PK", Order.ASC);
-            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 9, "A:INT_COL1", null);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 9, ":DATE_PK", Order.ASC);
+            assertIndexInfoMetadata(rs, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX", 10, "A:INT_COL1", null);
             assertFalse(rs.next());
             
             rs = IndexTestUtil.readDataTableIndexRow(conn, INDEX_DATA_SCHEMA, INDEX_DATA_TABLE, "IDX");

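The test change above reflects the new two-argument getIndexMaintainers signature: serializing index maintainers now takes the connection so that indexed expressions can be compiled against the data table's metadata. A minimal caller-side sketch of the same pattern as the test (the JDBC URL and table name below are placeholders, not from this commit):

    import java.sql.Connection;
    import java.sql.DriverManager;

    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.phoenix.jdbc.PhoenixConnection;
    import org.apache.phoenix.schema.PTable;
    import org.apache.phoenix.schema.PTableKey;

    public class IndexMaintainerSketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
                PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
                // Look up the table in the client-side metadata cache, as the test does.
                PTable table = pconn.getMetaDataCache().getTable(
                        new PTableKey(pconn.getTenantId(), "MY_SCHEMA.MY_TABLE"));
                ImmutableBytesWritable ptr = new ImmutableBytesWritable();
                // The connection is now threaded through so functional index expressions
                // can be compiled while the maintainers are serialized.
                table.getIndexMaintainers(ptr, pconn);
                System.out.println("serialized maintainer bytes: " + ptr.getLength());
            }
        }
    }
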
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/antlr3/PhoenixSQL.g
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index bcf26be..fad5fb3 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -393,11 +393,11 @@ create_view_node returns [CreateTableStatement ret]
 // Parse a create index statement.
 create_index_node returns [CreateIndexStatement ret]
     :   CREATE l=LOCAL? INDEX (IF NOT ex=EXISTS)? i=index_name ON t=from_table_name
-        (LPAREN pk=index_pk_constraint RPAREN)
+        (LPAREN ik=ik_constraint RPAREN)
         (INCLUDE (LPAREN icrefs=column_names RPAREN))?
         (p=fam_properties)?
         (SPLIT ON v=value_expression_list)?
-        {ret = factory.createIndex(i, factory.namedTable(null,t), pk, icrefs, v, p, ex!=null, l==null ? IndexType.getDefault() : IndexType.LOCAL, getBindCount()); }
+        {ret = factory.createIndex(i, factory.namedTable(null,t), ik, icrefs, v, p, ex!=null, l==null ? IndexType.getDefault() : IndexType.LOCAL, getBindCount()); }
     ;
 
 // Parse a create sequence statement.
@@ -436,17 +436,17 @@ col_name_with_sort_order returns [Pair<ColumnName, SortOrder> ret]
     :   f=identifier (order=ASC|order=DESC)? {$ret = Pair.newPair(factory.columnName(f), order == null ? SortOrder.getDefault() : SortOrder.fromDDLValue(order.getText()));}
 ;
 
-index_pk_constraint returns [PrimaryKeyConstraint ret]
-    :   cols = col_def_name_with_sort_order_list {$ret = factory.primaryKey(null, cols); }
-    ;
+ik_constraint returns [IndexKeyConstraint ret]
+    :   x = expression_with_sort_order_list {$ret = factory.indexKey(x); }
+;
 
-col_def_name_with_sort_order_list returns [List<Pair<ColumnName, SortOrder>> ret]
-@init{ret = new ArrayList<Pair<ColumnName, SortOrder>>(); }
-    :   p=col_def_name_with_sort_order {$ret.add(p);}  (COMMA p = col_def_name_with_sort_order {$ret.add(p);} )*
+expression_with_sort_order_list returns [List<Pair<ParseNode, SortOrder>> ret]
+@init{ret = new ArrayList<Pair<ParseNode, SortOrder>>(); }
+    :   p=expression_with_sort_order {$ret.add(p);}  (COMMA p = expression_with_sort_order {$ret.add(p);} )*
 ;
 
-col_def_name_with_sort_order returns [Pair<ColumnName, SortOrder> ret]
-    :   c=column_name (order=ASC|order=DESC)? {$ret = Pair.newPair(c, order == null ? SortOrder.getDefault() : SortOrder.fromDDLValue(order.getText()));}
+expression_with_sort_order returns [Pair<ParseNode, SortOrder> ret]
+    :   (x=expression) (order=ASC|order=DESC)? {$ret = Pair.newPair(x, order == null ? SortOrder.getDefault() : SortOrder.fromDDLValue(order.getText()));}
 ;
 
 fam_properties returns [ListMultimap<String,Pair<String,Object>> ret]
@@ -780,7 +780,9 @@ term returns [ParseNode ret]
     |   field=identifier LPAREN l=zero_or_more_expressions RPAREN wg=(WITHIN GROUP LPAREN ORDER BY l2=one_or_more_expressions (a=ASC | DESC) RPAREN)?
         {
             FunctionParseNode f = wg==null ? factory.function(field, l) : factory.function(field,l,l2,a!=null);
-            contextStack.peek().setAggregate(f.isAggregate());
+            if (!contextStack.isEmpty()) {
+            	contextStack.peek().setAggregate(f.isAggregate());
+            }
             $ret = f;
         } 
     |   field=identifier LPAREN t=ASTERISK RPAREN 
@@ -789,13 +791,17 @@ term returns [ParseNode ret]
                 throwRecognitionException(t); 
             }
             FunctionParseNode f = factory.function(field, LiteralParseNode.STAR);
-            contextStack.peek().setAggregate(f.isAggregate()); 
+            if (!contextStack.isEmpty()) {
+            	contextStack.peek().setAggregate(f.isAggregate());
+            }
             $ret = f;
         } 
     |   field=identifier LPAREN t=DISTINCT l=zero_or_more_expressions RPAREN 
         {
             FunctionParseNode f = factory.functionDistinct(field, l);
-            contextStack.peek().setAggregate(f.isAggregate());
+            if (!contextStack.isEmpty()) {
+            	contextStack.peek().setAggregate(f.isAggregate());
+            }
             $ret = f;
         }
     |   e=case_statement { $ret = e; }

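The grammar change above accepts an arbitrary expression (with an optional ASC/DESC) wherever the old index_pk_constraint only allowed a column name. A hedged end-to-end sketch of the DDL this enables (table, column, and index names are illustrative only):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class FunctionalIndexDdlSketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE IF NOT EXISTS EMP " +
                        "(ID INTEGER PRIMARY KEY, NAME VARCHAR, CITY VARCHAR)");
                // The indexed key is an expression rather than a bare column, which the new
                // ik_constraint / expression_with_sort_order rules now allow.
                stmt.execute("CREATE INDEX IF NOT EXISTS EMP_UPPER_NAME_IDX " +
                        "ON EMP (UPPER(NAME) DESC) INCLUDE (CITY)");
            }
        }
    }
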
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReaderGenerator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReaderGenerator.java b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReaderGenerator.java
index f213d2d..718f820 100644
--- a/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReaderGenerator.java
+++ b/phoenix-core/src/main/java/org/apache/hadoop/hbase/regionserver/IndexHalfStoreFileReaderGenerator.java
@@ -57,8 +57,8 @@ import org.apache.phoenix.schema.MetaDataClient;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.PTable.IndexType;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.util.ByteUtil;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.MetaDataUtil;
@@ -144,7 +144,7 @@ public class IndexHalfStoreFileReaderGenerator extends BaseRegionObserver {
                         new HashMap<ImmutableBytesWritable, IndexMaintainer>();
                 for (PTable index : indexes) {
                     if (index.getIndexType() == IndexType.LOCAL) {
-                        IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable);
+                        IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable, conn);
                         indexMaintainers.put(new ImmutableBytesWritable(MetaDataUtil
                                 .getViewIndexIdDataType().toBytes(index.getViewIndexId())),
                             indexMaintainer);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 0a2ee38..322d24a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -65,7 +65,6 @@ import org.apache.phoenix.schema.MetaDataClient;
 import org.apache.phoenix.schema.MetaDataEntityNotFoundException;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PIndexState;
-import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PRow;
 import org.apache.phoenix.schema.PTable;
@@ -75,6 +74,7 @@ import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.schema.types.PLong;
 import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.ScanUtil;
 
@@ -483,7 +483,7 @@ public class DeleteCompiler {
                     public MutationState execute() throws SQLException {
                         // TODO: share this block of code with UPSERT SELECT
                         ImmutableBytesWritable ptr = context.getTempPtr();
-                        tableRef.getTable().getIndexMaintainers(ptr);
+                        tableRef.getTable().getIndexMaintainers(ptr, context.getConnection());
                         ServerCache cache = null;
                         try {
                             if (ptr.getLength() > 0) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
index 95e145c..97818e6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/ExpressionCompiler.java
@@ -135,6 +135,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
     protected final StatementContext context;
     protected final GroupBy groupBy;
     private int nodeCount;
+    private int totalNodeCount;
     private final boolean resolveViewConstants;
 
     public ExpressionCompiler(StatementContext context) {
@@ -166,6 +167,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
     public void reset() {
         this.isAggregate = false;
         this.nodeCount = 0;
+        this.totalNodeCount = 0;
     }
 
     @Override
@@ -420,6 +422,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
     @Override
     public void addElement(List<Expression> l, Expression element) {
         nodeCount--;
+        totalNodeCount++;
         l.add(element);
     }
 
@@ -553,7 +556,7 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
                 expr =  CastParseNode.convertToRoundExpressionIfNeeded(fromDataType, targetDataType, children);
             }
         }
-        return CoerceExpression.create(expr, targetDataType, SortOrder.getDefault(), expr.getMaxLength());  
+        return wrapGroupByExpression(CoerceExpression.create(expr, targetDataType, SortOrder.getDefault(), expr.getMaxLength()));  
     }
     
    @Override
@@ -1254,4 +1257,8 @@ public class ExpressionCompiler extends UnsupportedAllParseNodeVisitor<Expressio
         Object result = context.getSubqueryResult(node.getSelectNode());
         return LiteralExpression.newConstant(result);
     }
+    
+    public int getTotalNodeCount() {
+        return totalNodeCount;
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index 0163082..1a605b7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
@@ -184,7 +184,7 @@ public class FromCompiler {
             Expression sourceExpression = projector.getColumnProjector(column.getPosition()).getExpression();
             PColumnImpl projectedColumn = new PColumnImpl(column.getName(), column.getFamilyName(),
                     sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(),
-                    column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced());
+                    column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr());
             projectedColumns.add(projectedColumn);
             sourceExpressions.add(sourceExpression);
         }
@@ -391,7 +391,7 @@ public class FromCompiler {
                         familyName = PNameFactory.newName(family);
                     }
                     allcolumns.add(new PColumnImpl(name, familyName, dynColumn.getDataType(), dynColumn.getMaxLength(),
-                            dynColumn.getScale(), dynColumn.isNull(), position, dynColumn.getSortOrder(), dynColumn.getArraySize(), null, false));
+                            dynColumn.getScale(), dynColumn.isNull(), position, dynColumn.getSortOrder(), dynColumn.getArraySize(), null, false, dynColumn.getExpression()));
                     position++;
                 }
                 theTable = PTableImpl.makePTable(theTable, allcolumns);
@@ -469,7 +469,7 @@ public class FromCompiler {
                 }
                 PColumnImpl column = new PColumnImpl(PNameFactory.newName(alias),
                         PNameFactory.newName(QueryConstants.DEFAULT_COLUMN_FAMILY),
-                        null, 0, 0, true, position++, SortOrder.ASC, null, null, false);
+                        null, 0, 0, true, position++, SortOrder.ASC, null, null, false, null);
                 columns.add(column);
             }
             PTable t = PTableImpl.makePTable(null, PName.EMPTY_NAME, PName.EMPTY_NAME,

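Every PColumnImpl construction site in this commit picks up a new trailing argument: the expression string backing the column, null for ordinary columns and non-null only for expression-indexed columns. A small sketch of the widened constructor under that assumption (the column and family names are made up for illustration):

    import org.apache.phoenix.schema.PColumnImpl;
    import org.apache.phoenix.schema.PNameFactory;
    import org.apache.phoenix.schema.SortOrder;
    import org.apache.phoenix.schema.types.PVarchar;

    public class PColumnSketch {
        public static void main(String[] args) {
            // expressionStr (last argument) is null for a plain column; a functional
            // index key column would instead carry the indexed expression string.
            PColumnImpl ordinary = new PColumnImpl(
                    PNameFactory.newName("V1"), PNameFactory.newName("CF"),
                    PVarchar.INSTANCE, null, null, true,
                    0, SortOrder.getDefault(), null, null, false,
                    null);
            System.out.println(ordinary.getExpressionStr()); // prints null
        }
    }
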
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/HavingCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/HavingCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/HavingCompiler.java
index 0cd6ecf..224a9b4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/HavingCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/HavingCompiler.java
@@ -35,8 +35,8 @@ import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.SelectStatement;
 import org.apache.phoenix.parse.SelectStatementRewriter;
 import org.apache.phoenix.schema.ColumnRef;
-import org.apache.phoenix.schema.types.PBoolean;
 import org.apache.phoenix.schema.TypeMismatchException;
+import org.apache.phoenix.schema.types.PBoolean;
 
 
 public class HavingCompiler {
@@ -171,7 +171,7 @@ public class HavingCompiler {
         @Override
         public Void visit(ColumnParseNode node) throws SQLException {
             ColumnRef ref = context.getResolver().resolveColumn(node.getSchemaName(), node.getTableName(), node.getName());
-            boolean isAggregateColumn = groupBy.getExpressions().indexOf(ref.newColumnExpression()) >= 0;
+            boolean isAggregateColumn = groupBy.getExpressions().indexOf(ref.newColumnExpression(node.isTableNameCaseSensitive(), node.isCaseSensitive())) >= 0;
             if (hasOnlyAggregateColumns == null) {
                 hasOnlyAggregateColumns = isAggregateColumn;
             } else {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/IndexExpressionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/IndexExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/IndexExpressionCompiler.java
new file mode 100644
index 0000000..b4a4168
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/IndexExpressionCompiler.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
+ * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+ * applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+package org.apache.phoenix.compile;
+
+import java.sql.SQLException;
+
+import org.apache.phoenix.expression.ColumnExpression;
+import org.apache.phoenix.parse.ColumnParseNode;
+import org.apache.phoenix.schema.ColumnRef;
+
+/**
+ * Used to check if the expression being compiled is a {@link ColumnExpression}
+ */
+public class IndexExpressionCompiler extends ExpressionCompiler {
+
+    // set when the compiled expression is a plain top-level column reference; otherwise stays null
+    private ColumnRef columnRef;
+
+    public IndexExpressionCompiler(StatementContext context) {
+        super(context);
+        this.columnRef = null;
+    }
+
+    @Override
+    public void reset() {
+        super.reset();
+        this.columnRef = null;
+    }
+
+    @Override
+    protected ColumnRef resolveColumn(ColumnParseNode node) throws SQLException {
+        ColumnRef columnRef = super.resolveColumn(node);
+        if (isTopLevel()) {
+            this.columnRef = columnRef;
+        }
+        return columnRef;
+    }
+
+    /**
+     * @return the column ref if the expression being compiled is a plain column reference, otherwise null
+     */
+    public ColumnRef getColumnRef() {
+        return columnRef;
+    }
+
+}

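A sketch of how the new compiler can be used by DDL code to tell a plain indexed column apart from a functional expression; the consuming code is not part of this hunk, so the helper below is only an assumption of the intended usage:

    import java.sql.SQLException;

    import org.apache.phoenix.compile.IndexExpressionCompiler;
    import org.apache.phoenix.compile.StatementContext;
    import org.apache.phoenix.parse.ParseNode;

    public class IndexKeyClassifier {
        /** Returns true when the indexed parse node resolves to a plain top-level column. */
        static boolean isPlainColumn(StatementContext context, ParseNode indexedNode) throws SQLException {
            IndexExpressionCompiler compiler = new IndexExpressionCompiler(context);
            compiler.reset();                       // clears any column ref from a previous expression
            indexedNode.accept(compiler);           // compiles the node, recording the top-level column ref
            return compiler.getColumnRef() != null; // null means a real expression such as UPPER(NAME)
        }
    }
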
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
index 445edd8..c29ea23 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
@@ -788,7 +788,7 @@ public class JoinCompiler {
             PName name = PNameFactory.newName(aliasedName);
             PColumnImpl column = new PColumnImpl(name, familyName, sourceColumn.getDataType(),
                     sourceColumn.getMaxLength(), sourceColumn.getScale(), sourceColumn.isNullable(),
-                    position, sourceColumn.getSortOrder(), sourceColumn.getArraySize(), sourceColumn.getViewConstant(), sourceColumn.isViewReferenced());
+                    position, sourceColumn.getSortOrder(), sourceColumn.getArraySize(), sourceColumn.getViewConstant(), sourceColumn.isViewReferenced(), sourceColumn.getExpressionStr());
             Expression sourceExpression = isLocalIndexColumnRef ?
                       NODE_FACTORY.column(TableName.create(schemaName, tableName), "\"" + colName + "\"", null).accept(new ExpressionCompiler(context))
                     : new ColumnRef(tableRef, sourceColumn.getPosition()).newColumnExpression();
@@ -807,7 +807,7 @@ public class JoinCompiler {
                 Expression sourceExpression = rowProjector.getColumnProjector(column.getPosition()).getExpression();
                 PColumnImpl projectedColumn = new PColumnImpl(PNameFactory.newName(colName), PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY),
                         sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(),
-                        column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced());
+                        column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr());
                 projectedColumns.add(projectedColumn);
                 sourceExpressions.add(sourceExpression);
             }
@@ -1367,7 +1367,7 @@ public class JoinCompiler {
                     } else {
                         PColumnImpl column = new PColumnImpl(c.getName(), c.getFamilyName(), c.getDataType(),
                                 c.getMaxLength(), c.getScale(), true, c.getPosition(),
-                                c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced());
+                                c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced(), c.getExpressionStr());
                         merged.add(column);
                     }
                 }
@@ -1378,7 +1378,7 @@ public class JoinCompiler {
                     PColumnImpl column = new PColumnImpl(c.getName(),
                             PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), c.getDataType(),
                             c.getMaxLength(), c.getScale(), type == JoinType.Inner ? c.isNullable() : true, position++,
-                            c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced());
+                            c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced(), c.getExpressionStr());
                     merged.add(column);
                 }
             }


[2/4] phoenix git commit: PHOENIX-514 Support functional indexes (Thomas D'Silva)

Posted by ja...@apache.org.
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
index 9c5c2cd..a51723b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
@@ -37,13 +37,15 @@ import org.apache.phoenix.compile.SubqueryRewriter;
 import org.apache.phoenix.iterate.ParallelIteratorFactory;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.parse.AliasedNode;
-import org.apache.phoenix.parse.HintNode;
-import org.apache.phoenix.parse.HintNode.Hint;
 import org.apache.phoenix.parse.AndParseNode;
 import org.apache.phoenix.parse.BooleanParseNodeVisitor;
 import org.apache.phoenix.parse.ColumnParseNode;
+import org.apache.phoenix.parse.IndexExpressionParseNodeRewriter;
+import org.apache.phoenix.parse.HintNode;
+import org.apache.phoenix.parse.HintNode.Hint;
 import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.ParseNodeFactory;
+import org.apache.phoenix.parse.ParseNodeRewriter;
 import org.apache.phoenix.parse.SelectStatement;
 import org.apache.phoenix.parse.TableNode;
 import org.apache.phoenix.query.QueryServices;
@@ -54,9 +56,8 @@ import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PIndexState;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.PTable.IndexType;
-import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.PTableType;
-import org.apache.phoenix.schema.SaltingUtil;
+import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.util.IndexUtil;
 
 import com.google.common.collect.Lists;
@@ -232,7 +233,10 @@ public class QueryOptimizer {
         // Check index state of now potentially updated index table to make sure it's active
         if (PIndexState.ACTIVE.equals(resolver.getTables().get(0).getTable().getIndexState())) {
             try {
+            	// rewrite nodes that match indexed expressions to the corresponding index column parse nodes
+                indexSelect = ParseNodeRewriter.rewrite(indexSelect, new  IndexExpressionParseNodeRewriter(index, statement.getConnection()));
                 QueryCompiler compiler = new QueryCompiler(statement, indexSelect, resolver, targetColumns, parallelIteratorFactory, dataPlan.getContext().getSequenceManager());
+                
                 QueryPlan plan = compiler.compile();
                 // If query doesn't have where clause and some of columns to project are missing
                 // in the index then we need to get missing columns from main table for each row in

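With the ParseNodeRewriter pass added above, a query whose parse nodes match an indexed expression is rewritten to reference the corresponding index column before the index plan is compiled, so the optimizer can choose the functional index. A hedged sketch (table, column, and index names continue the illustrative DDL example earlier in this message):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class FunctionalIndexQuerySketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                // UPPER(NAME) matches the indexed expression, so after the rewrite the
                // plan is expected to scan EMP_UPPER_NAME_IDX instead of the data table.
                ResultSet rs = stmt.executeQuery(
                        "EXPLAIN SELECT CITY FROM EMP WHERE UPPER(NAME) = 'JOHN DOE'");
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }
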
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/BetweenParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/BetweenParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/BetweenParseNode.java
index cc65d89..961af20 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/BetweenParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/BetweenParseNode.java
@@ -49,4 +49,26 @@ public class BetweenParseNode extends CompoundParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		BetweenParseNode other = (BetweenParseNode) obj;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/BindParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/BindParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/BindParseNode.java
index 75dfa90..5f649de 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/BindParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/BindParseNode.java
@@ -56,4 +56,26 @@ public class BindParseNode extends NamedParseNode {
         return ":" + index;
     }
 
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + index;
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		BindParseNode other = (BindParseNode) obj;
+		if (index != other.index)
+			return false;
+		return true;
+	}
+
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
index ea4e587..598a190 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CastParseNode.java
@@ -104,4 +104,42 @@ public class CastParseNode extends UnaryParseNode {
             throw TypeMismatchException.newException(fromDataType, targetDataType, firstChildExpr.toString());
 	    }
 	}
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + ((dt == null) ? 0 : dt.hashCode());
+		result = prime * result
+				+ ((maxLength == null) ? 0 : maxLength.hashCode());
+		result = prime * result + ((scale == null) ? 0 : scale.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		CastParseNode other = (CastParseNode) obj;
+		if (dt == null) {
+			if (other.dt != null)
+				return false;
+		} else if (!dt.equals(other.dt))
+			return false;
+		if (maxLength == null) {
+			if (other.maxLength != null)
+				return false;
+		} else if (!maxLength.equals(other.maxLength))
+			return false;
+		if (scale == null) {
+			if (other.scale != null)
+				return false;
+		} else if (!scale.equals(other.scale))
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
index 169754c..8032ba5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnDef.java
@@ -36,7 +36,7 @@ import com.google.common.base.Preconditions;
 /**
  * 
  * Represents a column definition during DDL
- *
+ * 
  * 
  * @since 0.1
  */
@@ -50,9 +50,10 @@ public class ColumnDef {
     private final SortOrder sortOrder;
     private final boolean isArray;
     private final Integer arrSize;
+    private final String expressionStr;
  
     ColumnDef(ColumnName columnDefName, String sqlTypeName, boolean isArray, Integer arrSize, Boolean isNull, Integer maxLength,
-    		            Integer scale, boolean isPK, SortOrder sortOrder) {
+    		            Integer scale, boolean isPK, SortOrder sortOrder, String expressionStr) {
    	 try {
          Preconditions.checkNotNull(sortOrder);
    	     PDataType localType = null;
@@ -133,13 +134,14 @@ public class ColumnDef {
          if(this.isArray) {
              this.dataType = localType;
          }
+         this.expressionStr = expressionStr;
      } catch (SQLException e) {
          throw new ParseException(e);
      }
     }
     ColumnDef(ColumnName columnDefName, String sqlTypeName, Boolean isNull, Integer maxLength,
-            Integer scale, boolean isPK, SortOrder sortOrder) {
-    	this(columnDefName, sqlTypeName, false, 0, isNull, maxLength, scale, isPK, sortOrder);
+            Integer scale, boolean isPK, SortOrder sortOrder, String expressionStr) {
+    	this(columnDefName, sqlTypeName, false, 0, isNull, maxLength, scale, isPK, sortOrder, expressionStr);
     }
 
     public ColumnName getColumnDefName() {
@@ -183,4 +185,8 @@ public class ColumnDef {
 	public Integer getArraySize() {
 		return arrSize;
 	}
+
+	public String getExpression() {
+		return expressionStr;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnName.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnName.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnName.java
index f613a05..82439ec 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnName.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnName.java
@@ -73,7 +73,7 @@ public class ColumnName {
 
     @Override
     public String toString() {
-        return SchemaUtil.getColumnName(getFamilyName(),getColumnName());
+		return SchemaUtil.getColumnName(getFamilyName(),getColumnName());
     }
     
     @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnParseNode.java
index 19dbc68..e7489fd 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ColumnParseNode.java
@@ -28,6 +28,7 @@ import org.apache.phoenix.query.QueryConstants;
  * @since 0.1
  */
 public class ColumnParseNode extends NamedParseNode {
+    // table name can also represent a column family 
     private final TableName tableName;
     private final String fullName;
     private final String alias;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/CompoundParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CompoundParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CompoundParseNode.java
index 053a9cc..e0ab22b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CompoundParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CompoundParseNode.java
@@ -31,7 +31,8 @@ import java.util.List;
  * @since 0.1
  */
 public abstract class CompoundParseNode extends ParseNode {
-    private final List<ParseNode> children;
+
+	private final List<ParseNode> children;
     private final boolean isStateless;
     
     CompoundParseNode(List<ParseNode> children) {
@@ -70,4 +71,33 @@ public abstract class CompoundParseNode extends ParseNode {
     public String toString() {
         return this.getClass().getName() + children.toString();
     }
+    
+    @Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result
+				+ ((children == null) ? 0 : children.hashCode());
+		result = prime * result + (isStateless ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		CompoundParseNode other = (CompoundParseNode) obj;
+		if (children == null) {
+			if (other.children != null)
+				return false;
+		} else if (!children.equals(other.children))
+			return false;
+		if (isStateless != other.isStateless)
+			return false;
+		return true;
+	}
 }

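The equals/hashCode overrides added across the parse node hierarchy in this commit give parse trees structural equality, which is what lets the rewriter later in this commit key a HashMap by parse node. A small sketch of that property (the expression string is illustrative, and the map value type is simplified here):

    import java.sql.SQLException;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.phoenix.parse.ParseNode;
    import org.apache.phoenix.parse.SQLParser;

    public class ParseNodeEqualitySketch {
        public static void main(String[] args) throws SQLException {
            ParseNode first = SQLParser.parseCondition("UPPER(NAME)");
            ParseNode second = SQLParser.parseCondition("UPPER(NAME)");
            // Two independently parsed but structurally identical trees now compare equal.
            System.out.println(first.equals(second) && first.hashCode() == second.hashCode());

            Map<ParseNode, String> rewriteMap = new HashMap<ParseNode, String>();
            rewriteMap.put(first, "index column placeholder");
            // A lookup by an equal tree succeeds, which the index expression rewrite relies on.
            System.out.println(rewriteMap.containsKey(second));
        }
    }
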
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java
index 669dc3f..bf76174 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/CreateIndexStatement.java
@@ -29,7 +29,7 @@ import com.google.common.collect.ListMultimap;
 
 public class CreateIndexStatement extends SingleTableStatement {
     private final TableName indexTableName;
-    private final PrimaryKeyConstraint indexConstraint;
+    private final IndexKeyConstraint indexKeyConstraint;
     private final List<ColumnName> includeColumns;
     private final List<ParseNode> splitNodes;
     private final ListMultimap<String,Pair<String,Object>> props;
@@ -37,11 +37,11 @@ public class CreateIndexStatement extends SingleTableStatement {
     private final IndexType indexType;
 
     public CreateIndexStatement(NamedNode indexTableName, NamedTableNode dataTable, 
-            PrimaryKeyConstraint indexConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
+            IndexKeyConstraint indexKeyConstraint, List<ColumnName> includeColumns, List<ParseNode> splits,
             ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType, int bindCount) {
         super(dataTable, bindCount);
         this.indexTableName =TableName.create(dataTable.getName().getSchemaName(),indexTableName.getName());
-        this.indexConstraint = indexConstraint == null ? PrimaryKeyConstraint.EMPTY : indexConstraint;
+        this.indexKeyConstraint = indexKeyConstraint == null ? IndexKeyConstraint.EMPTY : indexKeyConstraint;
         this.includeColumns = includeColumns == null ? Collections.<ColumnName>emptyList() : includeColumns;
         this.splitNodes = splits == null ? Collections.<ParseNode>emptyList() : splits;
         this.props = props == null ? ArrayListMultimap.<String,Pair<String,Object>>create() : props;
@@ -49,8 +49,8 @@ public class CreateIndexStatement extends SingleTableStatement {
         this.indexType = indexType;
     }
 
-    public PrimaryKeyConstraint getIndexConstraint() {
-        return indexConstraint;
+    public IndexKeyConstraint getIndexConstraint() {
+        return indexKeyConstraint;
     }
 
     public List<ColumnName> getIncludeColumns() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/ExistsParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ExistsParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ExistsParseNode.java
index 45ccdfe..fde7d76 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ExistsParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ExistsParseNode.java
@@ -50,4 +50,26 @@ public class ExistsParseNode extends UnaryParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		ExistsParseNode other = (ExistsParseNode) obj;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/FamilyWildcardParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/FamilyWildcardParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/FamilyWildcardParseNode.java
index 9cfb345..2c939fc 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FamilyWildcardParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FamilyWildcardParseNode.java
@@ -49,5 +49,27 @@ public class FamilyWildcardParseNode extends NamedParseNode {
     public boolean isRewrite() {
         return isRewrite;
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (isRewrite ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		FamilyWildcardParseNode other = (FamilyWildcardParseNode) obj;
+		if (isRewrite != other.isRewrite)
+			return false;
+		return true;
+	}
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
index e6ce6d1..c41fa4f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/FunctionParseNode.java
@@ -428,4 +428,35 @@ public class FunctionParseNode extends CompoundParseNode {
             return allowedValues;
         }
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + ((info == null) ? 0 : info.hashCode());
+		result = prime * result + ((name == null) ? 0 : name.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		FunctionParseNode other = (FunctionParseNode) obj;
+		if (info == null) {
+			if (other.info != null)
+				return false;
+		} else if (!info.equals(other.info))
+			return false;
+		if (name == null) {
+			if (other.name != null)
+				return false;
+		} else if (!name.equals(other.name))
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/InListParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/InListParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/InListParseNode.java
index 91f2b5c..fae15f5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/InListParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/InListParseNode.java
@@ -61,4 +61,26 @@ public class InListParseNode extends CompoundParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		InListParseNode other = (InListParseNode) obj;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/InParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/InParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/InParseNode.java
index acd71b1..84984e9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/InParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/InParseNode.java
@@ -56,4 +56,29 @@ public class InParseNode extends BinaryParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (isSubqueryDistinct ? 1231 : 1237);
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		InParseNode other = (InParseNode) obj;
+		if (isSubqueryDistinct != other.isSubqueryDistinct)
+			return false;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexExpressionParseNodeRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexExpressionParseNodeRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexExpressionParseNodeRewriter.java
new file mode 100644
index 0000000..efa3835
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexExpressionParseNodeRewriter.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
+ * file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
+ * License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by
+ * applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
+package org.apache.phoenix.parse;
+
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.phoenix.compile.ColumnResolver;
+import org.apache.phoenix.compile.ExpressionCompiler;
+import org.apache.phoenix.compile.FromCompiler;
+import org.apache.phoenix.compile.IndexStatementRewriter;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixStatement;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.types.PDataType;
+import org.apache.phoenix.util.IndexUtil;
+
+import com.google.common.collect.Maps;
+
+/**
+ * Used to replace parse nodes in a SelectStatement that match expressions present in an index with the
+ * corresponding {@link ColumnParseNode}
+ */
+public class IndexExpressionParseNodeRewriter extends ParseNodeRewriter {
+
+    private final Map<ParseNode, ParseNode> indexedParseNodeToColumnParseNodeMap;
+    
+    private static class ColumnParseNodeVisitor extends StatelessTraverseAllParseNodeVisitor {
+        
+        private boolean isParseNodeCaseSensitive;
+        
+        public void reset() {
+            this.isParseNodeCaseSensitive = false;
+        }
+        
+        @Override
+        public Void visit(ColumnParseNode node) throws SQLException {
+            isParseNodeCaseSensitive = isParseNodeCaseSensitive  || node.isCaseSensitive() || node.isTableNameCaseSensitive();
+            return null;
+        }
+        
+        public boolean isParseNodeCaseSensitive() {
+            return isParseNodeCaseSensitive;
+        }
+        
+    }
+
+    public IndexExpressionParseNodeRewriter(PTable index, PhoenixConnection connection) throws SQLException {
+        indexedParseNodeToColumnParseNodeMap = Maps.newHashMapWithExpectedSize(index.getColumns().size());
+        NamedTableNode tableNode = NamedTableNode.create(null,
+                TableName.create(index.getParentSchemaName().getString(), index.getParentTableName().getString()),
+                Collections.<ColumnDef> emptyList());
+        ColumnResolver dataResolver = FromCompiler.getResolver(tableNode, connection);
+        StatementContext context = new StatementContext(new PhoenixStatement(connection), dataResolver);
+        IndexStatementRewriter rewriter = new IndexStatementRewriter(dataResolver, null);
+        ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
+        ColumnParseNodeVisitor columnParseNodeVisitor = new ColumnParseNodeVisitor();
+        int indexPosOffset = (index.getBucketNum() == null ? 0 : 1) + (index.isMultiTenant() ? 1 : 0) + (index.getViewIndexId() == null ? 0 : 1);
+        List<PColumn> pkColumns = index.getPKColumns();
+		for (int i=indexPosOffset; i<pkColumns.size(); ++i) {
+        	PColumn column = pkColumns.get(i);
+            if (column.getExpressionStr()==null) {
+                continue;
+            }
+            ParseNode expressionParseNode = SQLParser.parseCondition(column.getExpressionStr());
+            columnParseNodeVisitor.reset();
+            expressionParseNode.accept(columnParseNodeVisitor);
+            String colName = column.getName().getString();
+            if (columnParseNodeVisitor.isParseNodeCaseSensitive()) {
+                // force the column name to be case sensitive by surrounding it with double quotes
+                colName = "\"" + colName + "\"";
+            }
+            
+            Expression dataExpression = expressionParseNode.accept(expressionCompiler);
+            PDataType expressionDataType = dataExpression.getDataType();
+            ParseNode indexedParseNode = expressionParseNode.accept(rewriter);
+            PDataType indexColType = IndexUtil.getIndexColumnDataType(dataExpression.isNullable(), expressionDataType);
+            ParseNode columnParseNode = new ColumnParseNode(null, colName, null);
+            if ( indexColType != expressionDataType) {
+                columnParseNode = NODE_FACTORY.cast(columnParseNode, expressionDataType, null, null);
+            }
+            indexedParseNodeToColumnParseNodeMap.put(indexedParseNode, columnParseNode);
+        }
+    }
+
+    @Override
+    protected ParseNode leaveCompoundNode(CompoundParseNode node, List<ParseNode> children, CompoundNodeFactory factory) {
+        return indexedParseNodeToColumnParseNodeMap.containsKey(node) ? indexedParseNodeToColumnParseNodeMap.get(node)
+                : super.leaveCompoundNode(node, children, factory);
+    }
+
+}

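Mirroring the QueryOptimizer change earlier in this commit, the rewriter is applied to the statement compiled against the index table. A sketch of that call, assuming a PTable for the functional index and a PhoenixConnection are already in hand:

    import java.sql.SQLException;

    import org.apache.phoenix.jdbc.PhoenixConnection;
    import org.apache.phoenix.parse.IndexExpressionParseNodeRewriter;
    import org.apache.phoenix.parse.ParseNodeRewriter;
    import org.apache.phoenix.parse.SelectStatement;
    import org.apache.phoenix.schema.PTable;

    public class IndexSelectRewriteSketch {
        static SelectStatement rewriteForIndex(SelectStatement select, PTable index,
                PhoenixConnection connection) throws SQLException {
            // Replace parse nodes that match indexed expressions with the corresponding
            // index column parse nodes before compiling the query against the index.
            return ParseNodeRewriter.rewrite(select,
                    new IndexExpressionParseNodeRewriter(index, connection));
        }
    }
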
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexKeyConstraint.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexKeyConstraint.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexKeyConstraint.java
index 7043b9d..ef40c78 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexKeyConstraint.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/IndexKeyConstraint.java
@@ -17,21 +17,25 @@
  */
 package org.apache.phoenix.parse;
 
+import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.hbase.util.Pair;
 
 import com.google.common.collect.ImmutableList;
+
 import org.apache.phoenix.schema.SortOrder;
 
 public class IndexKeyConstraint {
-    private final List<Pair<ColumnParseNode, SortOrder>> columnNameToSortOrder;
+	public static final IndexKeyConstraint EMPTY = new IndexKeyConstraint(Collections.<Pair<ParseNode, SortOrder>>emptyList());
+
+    private final List<Pair<ParseNode, SortOrder>> columnNameToSortOrder;
     
-    IndexKeyConstraint(List<Pair<ColumnParseNode, SortOrder>> columnNameAndSortOrder) {
-        this.columnNameToSortOrder = ImmutableList.copyOf(columnNameAndSortOrder);
+    IndexKeyConstraint(List<Pair<ParseNode, SortOrder>> parseNodeAndSortOrder) {
+        this.columnNameToSortOrder = ImmutableList.copyOf(parseNodeAndSortOrder);
     }
 
-    public List<Pair<ColumnParseNode, SortOrder>> getColumns() {
+    public List<Pair<ParseNode, SortOrder>> getParseNodeAndSortOrderList() {
         return columnNameToSortOrder;
     }
 }
\ No newline at end of file

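The constraint now pairs an arbitrary ParseNode with its SortOrder instead of a plain column name. A sketch of how DDL code can walk the new shape (the method below is illustrative, not the actual MetaDataClient code):

    import java.util.List;

    import org.apache.hadoop.hbase.util.Pair;
    import org.apache.phoenix.parse.CreateIndexStatement;
    import org.apache.phoenix.parse.ParseNode;
    import org.apache.phoenix.schema.SortOrder;

    public class IndexKeyWalker {
        static void walkIndexKey(CreateIndexStatement statement) {
            List<Pair<ParseNode, SortOrder>> indexKey =
                    statement.getIndexConstraint().getParseNodeAndSortOrderList();
            for (Pair<ParseNode, SortOrder> entry : indexKey) {
                ParseNode indexedExpression = entry.getFirst();   // may be a column or any expression
                SortOrder sortOrder = entry.getSecond();          // ASC/DESC from the DDL
                System.out.println(indexedExpression + " " + sortOrder);
            }
        }
    }
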
http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java
index 21d0f8e..614cfd0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/IsNullParseNode.java
@@ -50,4 +50,26 @@ public class IsNullParseNode extends UnaryParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		IsNullParseNode other = (IsNullParseNode) obj;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/LikeParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/LikeParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/LikeParseNode.java
index 9cec70e..41d252d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/LikeParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/LikeParseNode.java
@@ -59,4 +59,30 @@ public class LikeParseNode extends BinaryParseNode {
         }
         return visitor.visitLeave(this, l);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result
+				+ ((likeType == null) ? 0 : likeType.hashCode());
+		result = prime * result + (negate ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		LikeParseNode other = (LikeParseNode) obj;
+		if (likeType != other.likeType)
+			return false;
+		if (negate != other.negate)
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
index b83ce23..9e9184f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/LiteralParseNode.java
@@ -80,4 +80,25 @@ public class LiteralParseNode extends TerminalParseNode {
     public String toString() {
         return type == PVarchar.INSTANCE ? ("'" + value.toString() + "'") : value == null ? "null" : value.toString();
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((type == null) ? 0 : type.hashCode());
+		result = prime * result + ((value == null) ? 0 : value.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		LiteralParseNode other = (LiteralParseNode) obj;
+		return type.isComparableTo(other.type) && type.compareTo(value, other.value, other.type) == 0;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedNode.java
index e799875..6cfeb60 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedNode.java
@@ -28,7 +28,7 @@ public class NamedNode {
         return new NamedNode(name,true);
     }
     
-    private NamedNode(String name, boolean isCaseSensitive) {
+    NamedNode(String name, boolean isCaseSensitive) {
         this.name = name;
         this.isCaseSensitive = isCaseSensitive;
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedParseNode.java
index fa4872f..51da80a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/NamedParseNode.java
@@ -35,6 +35,10 @@ public abstract class NamedParseNode extends TerminalParseNode{
     NamedParseNode(String name) {
         this.namedNode = new NamedNode(name);
     }
+    
+    NamedParseNode(String name, boolean isCaseSensitive) {
+        this.namedNode = new NamedNode(name, isCaseSensitive);
+    }
 
     public String getName() {
         return namedNode.getName();
@@ -48,4 +52,30 @@ public abstract class NamedParseNode extends TerminalParseNode{
     public String toString() {
         return getName();
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result
+				+ ((namedNode == null) ? 0 : namedNode.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		NamedParseNode other = (NamedParseNode) obj;
+		if (namedNode == null) {
+			if (other.namedNode != null)
+				return false;
+		} else if (!namedNode.equals(other.namedNode))
+			return false;
+		return true;
+	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
index 0f40ece..57507b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
@@ -53,16 +53,15 @@ import org.apache.phoenix.util.SchemaUtil;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Maps;
 
-
 /**
- *
+ * 
  * Factory used by parser to construct object model while parsing a SQL statement
- *
+ * 
  * 
  * @since 0.1
  */
 public class ParseNodeFactory {
-    private static final String ARRAY_ELEM = "ARRAY_ELEM";
+	private static final String ARRAY_ELEM = "ARRAY_ELEM";
 	// TODO: Use Google's Reflection library instead to find aggregate functions
     @SuppressWarnings("unchecked")
     private static final List<Class<? extends FunctionExpression>> CLIENT_SIDE_BUILT_IN_FUNCTIONS = Arrays.<Class<? extends FunctionExpression>>asList(
@@ -241,10 +240,10 @@ public class ParseNodeFactory {
         return new StringConcatParseNode(children);
     }
 
-    public ColumnParseNode column(TableName tableName, String name, String alias) {
-        return new ColumnParseNode(tableName,name,alias);
+    public ColumnParseNode column(TableName tableName, String columnName, String alias) {
+        return new ColumnParseNode(tableName, columnName, alias);
     }
-
+    
     public ColumnName columnName(String columnName) {
         return new ColumnName(columnName);
     }
@@ -261,25 +260,29 @@ public class ParseNodeFactory {
         return new PropertyName(familyName, propertyName);
     }
 
-    public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName, boolean isNull, Integer maxLength, Integer scale, boolean isPK, SortOrder sortOrder) {
-        return new ColumnDef(columnDefName, sqlTypeName, isNull, maxLength, scale, isPK, sortOrder);
+    public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName, boolean isNull, Integer maxLength, Integer scale, boolean isPK, SortOrder sortOrder, String expressionStr) {
+        return new ColumnDef(columnDefName, sqlTypeName, isNull, maxLength, scale, isPK, sortOrder, expressionStr);
     }
 
     public ColumnDef columnDef(ColumnName columnDefName, String sqlTypeName, boolean isArray, Integer arrSize, Boolean isNull, Integer maxLength, Integer scale, boolean isPK, 
         	SortOrder sortOrder) {
-        return new ColumnDef(columnDefName, sqlTypeName, isArray, arrSize, isNull, maxLength, scale, isPK, sortOrder);
+        return new ColumnDef(columnDefName, sqlTypeName, isArray, arrSize, isNull, maxLength, scale, isPK, sortOrder, null);
     }
 
     public PrimaryKeyConstraint primaryKey(String name, List<Pair<ColumnName, SortOrder>> columnNameAndSortOrder) {
         return new PrimaryKeyConstraint(name, columnNameAndSortOrder);
     }
+    
+    public IndexKeyConstraint indexKey( List<Pair<ParseNode, SortOrder>> parseNodeAndSortOrder) {
+        return new IndexKeyConstraint(parseNodeAndSortOrder);
+    }
 
     public CreateTableStatement createTable(TableName tableName, ListMultimap<String,Pair<String,Object>> props, List<ColumnDef> columns, PrimaryKeyConstraint pkConstraint, List<ParseNode> splits, PTableType tableType, boolean ifNotExists, TableName baseTableName, ParseNode tableTypeIdNode, int bindCount) {
         return new CreateTableStatement(tableName, props, columns, pkConstraint, splits, tableType, ifNotExists, baseTableName, tableTypeIdNode, bindCount);
     }
 
-    public CreateIndexStatement createIndex(NamedNode indexName, NamedTableNode dataTable, PrimaryKeyConstraint pkConstraint, List<ColumnName> includeColumns, List<ParseNode> splits, ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType, int bindCount) {
-        return new CreateIndexStatement(indexName, dataTable, pkConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
+    public CreateIndexStatement createIndex(NamedNode indexName, NamedTableNode dataTable, IndexKeyConstraint ikConstraint, List<ColumnName> includeColumns, List<ParseNode> splits, ListMultimap<String,Pair<String,Object>> props, boolean ifNotExists, IndexType indexType, int bindCount) {
+        return new CreateIndexStatement(indexName, dataTable, ikConstraint, includeColumns, splits, props, ifNotExists, indexType, bindCount);
     }
 
     public CreateSequenceStatement createSequence(TableName tableName, ParseNode startsWith,
@@ -599,7 +602,12 @@ public class ParseNodeFactory {
         return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(), having,
                 statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
     }
-
+    
+    public SelectStatement select(SelectStatement statement, List<AliasedNode> select, ParseNode where, List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy) {
+        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), 
+                select, where, groupBy, having, orderBy, statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+    }
+    
     public SelectStatement select(SelectStatement statement, TableNode table) {
         return select(table, statement.getHint(), statement.isDistinct(), statement.getSelect(), statement.getWhere(), statement.getGroupBy(),
                 statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(),
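
With the factory changes above, an index key is built from arbitrary parse nodes rather than plain column names. A short sketch of how the new indexKey() method is fed; in the grammar the parse nodes come straight from the parser, and SQLParser#parseExpression is only used here as an assumed convenience for the example:

    import java.sql.SQLException;
    import java.util.Collections;

    import org.apache.hadoop.hbase.util.Pair;
    import org.apache.phoenix.parse.IndexKeyConstraint;
    import org.apache.phoenix.parse.ParseNode;
    import org.apache.phoenix.parse.ParseNodeFactory;
    import org.apache.phoenix.parse.SQLParser;
    import org.apache.phoenix.schema.SortOrder;

    public class IndexKeySketch {
        public static void main(String[] args) throws SQLException {
            ParseNodeFactory factory = new ParseNodeFactory();
            // The indexed key is an expression, not just a column reference.
            ParseNode expr = new SQLParser("UPPER(LAST_NAME)").parseExpression();
            IndexKeyConstraint ik = factory.indexKey(
                    Collections.singletonList(new Pair<ParseNode, SortOrder>(expr, SortOrder.ASC)));
            System.out.println(ik.getParseNodeAndSortOrderList().size());
        }
    }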

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/SequenceValueParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/SequenceValueParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/SequenceValueParseNode.java
index f29d79e..260584f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/SequenceValueParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/SequenceValueParseNode.java
@@ -60,4 +60,33 @@ public class SequenceValueParseNode extends TerminalParseNode {
     public Op getOp() {
         return op;
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + ((op == null) ? 0 : op.hashCode());
+		result = prime * result
+				+ ((tableName == null) ? 0 : tableName.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		SequenceValueParseNode other = (SequenceValueParseNode) obj;
+		if (op != other.op)
+			return false;
+		if (tableName == null) {
+			if (other.tableName != null)
+				return false;
+		} else if (!tableName.equals(other.tableName))
+			return false;
+		return true;
+	}
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/SubqueryParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/SubqueryParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/SubqueryParseNode.java
index 92c5284..b7bcb64 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/SubqueryParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/SubqueryParseNode.java
@@ -49,5 +49,33 @@ public class SubqueryParseNode extends TerminalParseNode {
     public <T> T accept(ParseNodeVisitor<T> visitor) throws SQLException {
         return visitor.visit(this);
     }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + (expectSingleRow ? 1231 : 1237);
+		result = prime * result + ((select == null) ? 0 : select.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		SubqueryParseNode other = (SubqueryParseNode) obj;
+		if (expectSingleRow != other.expectSingleRow)
+			return false;
+		if (select == null) {
+			if (other.select != null)
+				return false;
+		} else if (!select.equals(other.select))
+			return false;
+		return true;
+	}
     
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableName.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableName.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/TableName.java
index 9717067..654e899 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableName.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/TableName.java
@@ -21,6 +21,7 @@ import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.util.SchemaUtil;
 
 public class TableName {
+
     private final String tableName;
     private final String schemaName;
     private final boolean isTableNameCaseSensitive;
@@ -61,7 +62,7 @@ public class TableName {
     public String toString() {
         return (schemaName == null ? "" : schemaName + QueryConstants.NAME_SEPARATOR)  + tableName;
     }
-
+    
 	@Override
 	public int hashCode() {
 		final int prime = 31;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableWildcardParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableWildcardParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/TableWildcardParseNode.java
index 768ba5d..7292347 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/TableWildcardParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/TableWildcardParseNode.java
@@ -46,5 +46,34 @@ public class TableWildcardParseNode extends NamedParseNode {
         return visitor.visit(this);
     }
 
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = super.hashCode();
+		result = prime * result + (isRewrite ? 1231 : 1237);
+		result = prime * result
+				+ ((tableName == null) ? 0 : tableName.hashCode());
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (!super.equals(obj))
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		TableWildcardParseNode other = (TableWildcardParseNode) obj;
+		if (isRewrite != other.isRewrite)
+			return false;
+		if (tableName == null) {
+			if (other.tableName != null)
+				return false;
+		} else if (!tableName.equals(other.tableName))
+			return false;
+		return true;
+	}
+
 }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/parse/WildcardParseNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/WildcardParseNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/WildcardParseNode.java
index 59feeb5..fdfb64f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/WildcardParseNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/WildcardParseNode.java
@@ -51,6 +51,28 @@ public class WildcardParseNode extends TerminalParseNode {
 
     public boolean isRewrite() {
         return isRewrite;
-    }    
+    }
+
+	@Override
+	public int hashCode() {
+		final int prime = 31;
+		int result = 1;
+		result = prime * result + (isRewrite ? 1231 : 1237);
+		return result;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj)
+			return true;
+		if (obj == null)
+			return false;
+		if (getClass() != obj.getClass())
+			return false;
+		WildcardParseNode other = (WildcardParseNode) obj;
+		if (isRewrite != other.isRewrite)
+			return false;
+		return true;
+	}    
     
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateColumn.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateColumn.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateColumn.java
index be85635..6c6bcf7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateColumn.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateColumn.java
@@ -21,6 +21,7 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 
+import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.util.SizedUtil;
 
 public class DelegateColumn extends DelegateDatum implements PColumn {
@@ -73,4 +74,9 @@ public class DelegateColumn extends DelegateDatum implements PColumn {
     public boolean isViewReferenced() {
         return getDelegate().isViewReferenced();
     }
+    
+    @Override
+    public String getExpressionStr() {
+        return getDelegate().getExpressionStr();
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
index 38aac31..b719aae 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/DelegateTable.java
@@ -22,6 +22,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.stats.PTableStats;
 
 public class DelegateTable implements PTable {
@@ -161,13 +162,13 @@ public class DelegateTable implements PTable {
     }
 
     @Override
-    public void getIndexMaintainers(ImmutableBytesWritable ptr) {
-        delegate.getIndexMaintainers(ptr);
+    public void getIndexMaintainers(ImmutableBytesWritable ptr, PhoenixConnection connection) {
+        delegate.getIndexMaintainers(ptr, connection);
     }
 
     @Override
-    public IndexMaintainer getIndexMaintainer(PTable dataTable) {
-        return delegate.getIndexMaintainer(dataTable);
+    public IndexMaintainer getIndexMaintainer(PTable dataTable, PhoenixConnection connection) {
+        return delegate.getIndexMaintainer(dataTable, connection);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
index 5791c82..09d2f66 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/MetaDataClient.java
@@ -24,6 +24,7 @@ import static org.apache.hadoop.hbase.HColumnDescriptor.TTL;
 import static org.apache.phoenix.exception.SQLExceptionCode.INSUFFICIENT_MULTI_TENANT_COLUMNS;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT;
+import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_DEF;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_FAMILY;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_SIZE;
@@ -102,11 +103,13 @@ import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.ColumnResolver;
 import org.apache.phoenix.compile.ExplainPlan;
 import org.apache.phoenix.compile.FromCompiler;
+import org.apache.phoenix.compile.IndexExpressionCompiler;
 import org.apache.phoenix.compile.MutationPlan;
 import org.apache.phoenix.compile.PostDDLCompiler;
 import org.apache.phoenix.compile.PostIndexDDLCompiler;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.compile.StatementNormalizer;
 import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
@@ -114,6 +117,9 @@ import org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
 import org.apache.phoenix.execute.MutationState;
+import org.apache.phoenix.expression.Determinism;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.RowKeyColumnExpression;
 import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -131,7 +137,9 @@ import org.apache.phoenix.parse.DropColumnStatement;
 import org.apache.phoenix.parse.DropIndexStatement;
 import org.apache.phoenix.parse.DropSequenceStatement;
 import org.apache.phoenix.parse.DropTableStatement;
+import org.apache.phoenix.parse.IndexKeyConstraint;
 import org.apache.phoenix.parse.NamedTableNode;
+import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.ParseNodeFactory;
 import org.apache.phoenix.parse.PrimaryKeyConstraint;
 import org.apache.phoenix.parse.TableName;
@@ -257,8 +265,9 @@ public class MetaDataClient {
         VIEW_CONSTANT + "," +
         IS_VIEW_REFERENCED + "," +
         PK_NAME + "," +  // write this both in the column and table rows for access by metadata APIs
-        KEY_SEQ +
-        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+        KEY_SEQ + "," +
+        COLUMN_DEF +
+        ") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
     private static final String UPDATE_COLUMN_POSITION =
         "UPSERT INTO " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CATALOG_TABLE + "\" ( " +
         TENANT_ID + "," +
@@ -322,6 +331,7 @@ public class MetaDataClient {
             table = connection.getMetaDataCache().getTable(new PTableKey(tenantId, fullTableName));
             tableTimestamp = table.getTimeStamp();
         } catch (TableNotFoundException e) {
+            // Ignore: the table may simply not be cached yet (see TODO below).
             // TODO: Try again on services cache, as we may be looking for
             // a global multi-tenant table
         }
@@ -499,6 +509,11 @@ public class MetaDataClient {
         } else {
             colUpsert.setShort(17, keySeq);
         }
+        if (column.getExpressionStr() == null) {
+            colUpsert.setNull(18, Types.VARCHAR);
+        } else {
+            colUpsert.setString(18, column.getExpressionStr());
+        }
         colUpsert.execute();
     }
 
@@ -545,7 +560,7 @@ public class MetaDataClient {
             }
 
             PColumn column = new PColumnImpl(PNameFactory.newName(columnName), familyName, def.getDataType(),
-                    def.getMaxLength(), def.getScale(), isNull, position, sortOrder, def.getArraySize(), null, false);
+                    def.getMaxLength(), def.getScale(), isNull, position, sortOrder, def.getArraySize(), null, false, def.getExpression());
             return column;
         } catch (IllegalArgumentException e) { // Based on precondition check in constructor
             throw new SQLException(e);
@@ -762,11 +777,11 @@ public class MetaDataClient {
                 List<PTable> indexes = Lists.newArrayListWithExpectedSize(1);
                 // Only build newly created index.
                 indexes.add(index);
-                IndexMaintainer.serialize(dataTable, ptr, indexes);
+                IndexMaintainer.serialize(dataTable, ptr, indexes, plan.getContext().getConnection());
                 scan.setAttribute(BaseScannerRegionObserver.LOCAL_INDEX_BUILD, ByteUtil.copyKeyBytesIfNecessary(ptr));
                 // By default, we'd use a FirstKeyOnly filter as nothing else needs to be projected for count(*).
                 // However, in this case, we need to project all of the data columns that contribute to the index.
-                IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable);
+                IndexMaintainer indexMaintainer = index.getIndexMaintainer(dataTable, connection);
                 for (ColumnReference columnRef : indexMaintainer.getAllColumns()) {
                     scan.addColumn(columnRef.getFamily(), columnRef.getQualifier());
                 }
@@ -884,10 +899,10 @@ public class MetaDataClient {
      * @throws SQLException
      */
     public MutationState createIndex(CreateIndexStatement statement, byte[][] splits) throws SQLException {
-        PrimaryKeyConstraint pk = statement.getIndexConstraint();
+        IndexKeyConstraint ik = statement.getIndexConstraint();
         TableName indexTableName = statement.getIndexTableName();
-
-        List<Pair<ColumnName, SortOrder>> indexedPkColumns = pk.getColumnNames();
+        
+        List<Pair<ParseNode, SortOrder>> indexParseNodeAndSortOrderList = ik.getParseNodeAndSortOrderList();
         List<ColumnName> includedColumns = statement.getIncludeColumns();
         TableRef tableRef = null;
         PTable table = null;
@@ -915,24 +930,30 @@ public class MetaDataClient {
                     }
                 }
                 int posOffset = 0;
-                Set<PColumn> unusedPkColumns;
+                List<PColumn> pkColumns = dataTable.getPKColumns();
+                Set<RowKeyColumnExpression> unusedPkColumns;
                 if (dataTable.getBucketNum() != null) { // Ignore SALT column
-                    unusedPkColumns = new LinkedHashSet<PColumn>(dataTable.getPKColumns().subList(1, dataTable.getPKColumns().size()));
-                    posOffset++;
+                	unusedPkColumns = Sets.newLinkedHashSetWithExpectedSize(pkColumns.size()-1);
+                	posOffset++;
                 } else {
-                    unusedPkColumns = new LinkedHashSet<PColumn>(dataTable.getPKColumns());
+                	unusedPkColumns = Sets.newLinkedHashSetWithExpectedSize(pkColumns.size());
+                }
+                for (int i = posOffset; i < pkColumns.size(); i++) {
+                    PColumn column = pkColumns.get(i);
+					unusedPkColumns.add(new RowKeyColumnExpression(column, new RowKeyValueAccessor(pkColumns, i), "\""+column.getName().getString()+"\""));
                 }
                 List<Pair<ColumnName, SortOrder>> allPkColumns = Lists.newArrayListWithExpectedSize(unusedPkColumns.size());
-                List<ColumnDef> columnDefs = Lists.newArrayListWithExpectedSize(includedColumns.size() + indexedPkColumns.size());
-
+                List<ColumnDef> columnDefs = Lists.newArrayListWithExpectedSize(includedColumns.size() + indexParseNodeAndSortOrderList.size());
+                
                 if (dataTable.isMultiTenant()) {
                     // Add tenant ID column as first column in index
                     PColumn col = dataTable.getPKColumns().get(posOffset);
-                    unusedPkColumns.remove(col);
+                    RowKeyColumnExpression columnExpression = new RowKeyColumnExpression(col, new RowKeyValueAccessor(pkColumns, posOffset), col.getName().getString());
+					unusedPkColumns.remove(columnExpression);
                     PDataType dataType = IndexUtil.getIndexColumnDataType(col);
                     ColumnName colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
                     allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, col.getSortOrder()));
-                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, SortOrder.getDefault()));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, SortOrder.getDefault(), col.getName().getString()));
                 }
                 /*
                  * Allocate an index ID in two circumstances:
@@ -945,55 +966,81 @@ public class MetaDataClient {
                     PDataType dataType = MetaDataUtil.getViewIndexIdDataType();
                     ColumnName colName = ColumnName.caseSensitiveColumnName(MetaDataUtil.getViewIndexIdColumnName());
                     allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, SortOrder.getDefault()));
-                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), false, null, null, false, SortOrder.getDefault()));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), false, null, null, false, SortOrder.getDefault(), null));
                 }
-                // First columns are the indexed ones
-                for (Pair<ColumnName, SortOrder> pair : indexedPkColumns) {
-                    ColumnName colName = pair.getFirst();
-                    PColumn col = resolver.resolveColumn(null, colName.getFamilyName(), colName.getColumnName()).getColumn();
-                    unusedPkColumns.remove(col);
-                    // Ignore view constants for updatable views as we don't need these in the index
-                    if (col.getViewConstant() == null) {
-                        PDataType dataType = IndexUtil.getIndexColumnDataType(col);
-                        colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
-                        allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, pair.getSecond()));
-                        columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, SortOrder.getDefault()));
-                    }
+                
+                PhoenixStatement phoenixStatement = new PhoenixStatement(connection);
+                StatementContext context = new StatementContext(phoenixStatement, resolver);
+                IndexExpressionCompiler expressionIndexCompiler = new IndexExpressionCompiler(context);
+                Set<ColumnName> indexedColumnNames = Sets.newHashSetWithExpectedSize(indexParseNodeAndSortOrderList.size());
+                for (Pair<ParseNode, SortOrder> pair : indexParseNodeAndSortOrderList) {
+                	ParseNode parseNode = pair.getFirst();
+                    // normalize the parse node
+                    parseNode = StatementNormalizer.normalize(parseNode, resolver);
+                    // compile the parseNode to get an expression
+                    expressionIndexCompiler.reset();
+                    Expression expression = parseNode.accept(expressionIndexCompiler);   
+                    if (expressionIndexCompiler.isAggregate()) {
+                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.AGGREGATE_EXPRESSION_NOT_ALLOWED_IN_INDEX).build().buildException();
+                    }
+                    if (expression.getDeterminism() != Determinism.ALWAYS) {
+                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.NON_DETERMINISTIC_EXPRESSION_NOT_ALLOWED_IN_INDEX).build().buildException();
+                    }
+                    // isStateless() is true for any constant (including a view constant); constants are not needed in the index
+                    if (expression.isStateless()) {
+                        continue;
+                    }
+                    unusedPkColumns.remove(expression);
+                    
+                    ColumnName colName = null;
+                    ColumnRef colRef = expressionIndexCompiler.getColumnRef();
+                    if (colRef != null) {
+                        // if this is a regular column
+                        PColumn column = colRef.getColumn();
+                        String columnFamilyName = column.getFamilyName() != null ? column.getFamilyName().getString() : null;
+                        colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(columnFamilyName, column.getName().getString()));
+                    }
+                    else {
+                        // if this is an expression
+                        // TODO column names cannot contain double quotes; remove this once PHOENIX-1621 is fixed
+                        String name = expression.toString().replaceAll("\"", "'");
+                        colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(null, name));
+                    }
+                    indexedColumnNames.add(colName);
+                    PDataType dataType = IndexUtil.getIndexColumnDataType(expression.isNullable(), expression.getDataType());
+                    allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, pair.getSecond()));
+                    columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), expression.isNullable(), expression.getMaxLength(), expression.getScale(), false, pair.getSecond(), expression.toString()));
                 }
 
                 // Next all the PK columns from the data table that aren't indexed
                 if (!unusedPkColumns.isEmpty()) {
-                    for (PColumn col : unusedPkColumns) {
+                    for (RowKeyColumnExpression colExpression : unusedPkColumns) {
+                        PColumn col = dataTable.getPKColumns().get(colExpression.getPosition());
                         // Don't add columns with constant values from updatable views, as
                         // we don't need these in the index
                         if (col.getViewConstant() == null) {
                             ColumnName colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
-                            allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, col.getSortOrder()));
-                            PDataType dataType = IndexUtil.getIndexColumnDataType(col);
-                            columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, col.getSortOrder()));
+                            allPkColumns.add(new Pair<ColumnName, SortOrder>(colName, colExpression.getSortOrder()));
+                            PDataType dataType = IndexUtil.getIndexColumnDataType(colExpression.isNullable(), colExpression.getDataType());
+                            columnDefs.add(FACTORY.columnDef(colName, dataType.getSqlTypeName(),
+                                    colExpression.isNullable(), colExpression.getMaxLength(), colExpression.getScale(),
+                                    false, colExpression.getSortOrder(), colExpression.toString()));
                         }
                     }
                 }
-                pk = FACTORY.primaryKey(null, allPkColumns);
-
+                
                 // Last all the included columns (minus any PK columns)
                 for (ColumnName colName : includedColumns) {
                     PColumn col = resolver.resolveColumn(null, colName.getFamilyName(), colName.getColumnName()).getColumn();
-                    if (SchemaUtil.isPKColumn(col)) {
-                        if (!unusedPkColumns.contains(col)) {
-                            throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_EXIST_IN_DEF).build().buildException();
-                        }
-                    } else {
-                        colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
-                        // Check for duplicates between indexed and included columns
-                        if (pk.contains(colName)) {
-                            throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_EXIST_IN_DEF).build().buildException();
-                        }
-                        if (!SchemaUtil.isPKColumn(col) && col.getViewConstant() == null) {
-                            // Need to re-create ColumnName, since the above one won't have the column family name
-                            colName = ColumnName.caseSensitiveColumnName(col.getFamilyName().getString(), IndexUtil.getIndexColumnName(col));
-                            columnDefs.add(FACTORY.columnDef(colName, col.getDataType().getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, col.getSortOrder()));
-                        }
+                    colName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(col));
+                    // Check for duplicates between indexed and included columns
+                    if (indexedColumnNames.contains(colName)) {
+                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.COLUMN_EXIST_IN_DEF).build().buildException();
+                    }
+                    if (!SchemaUtil.isPKColumn(col) && col.getViewConstant() == null) {
+                        // Need to re-create ColumnName, since the above one won't have the column family name
+                        colName = ColumnName.caseSensitiveColumnName(col.getFamilyName().getString(), IndexUtil.getIndexColumnName(col));
+                        columnDefs.add(FACTORY.columnDef(colName, col.getDataType().getSqlTypeName(), col.isNullable(), col.getMaxLength(), col.getScale(), false, col.getSortOrder(), null));
                     }
                 }
 
@@ -1030,6 +1077,7 @@ public class MetaDataClient {
                 if (dataTable.getDefaultFamilyName() != null && dataTable.getType() != PTableType.VIEW && indexId == null) {
                     statement.getProps().put("", new Pair<String,Object>(DEFAULT_COLUMN_FAMILY_NAME,dataTable.getDefaultFamilyName().getString()));
                 }
+                PrimaryKeyConstraint pk = FACTORY.primaryKey(null, allPkColumns);
                 CreateTableStatement tableStatement = FACTORY.createTable(indexTableName, statement.getProps(), columnDefs, pk, statement.getSplitNodes(), PTableType.INDEX, statement.ifNotExists(), null, null, statement.getBindCount());
                 table = createTableInternal(tableStatement, splits, dataTable, null, null, null, null, indexId, statement.getIndexType());
                 break;
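
Taken together, the createIndex() changes above let an index key be any deterministic, non-aggregate expression over the data table. A minimal JDBC sketch of the resulting user-facing behaviour; the connection URL, table and index names are illustrative only:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class FunctionalIndexExample {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE IF NOT EXISTS EMP (ID INTEGER PRIMARY KEY, LAST_NAME VARCHAR)");
                // The index key below is an expression; it is compiled through
                // IndexExpressionCompiler and stored as the index column's expression string.
                stmt.execute("CREATE INDEX IF NOT EXISTS EMP_UPPER_IDX ON EMP (UPPER(LAST_NAME))");
                // A query over the same expression can then be served from the index.
                try (ResultSet rs = stmt.executeQuery(
                        "SELECT ID FROM EMP WHERE UPPER(LAST_NAME) = 'SMITH'")) {
                    while (rs.next()) {
                        System.out.println(rs.getInt(1));
                    }
                }
            }
        }
    }
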
@@ -2051,7 +2099,7 @@ public class MetaDataClient {
                     }
                 }
 
-                boolean isAddingPKColumn = false;
+                int numPkColumnsAdded = 0;
                 PreparedStatement colUpsert = connection.prepareStatement(INSERT_COLUMN);
 
                 List<PColumn> columns = Lists.newArrayListWithExpectedSize(columnDefs.size());
@@ -2077,7 +2125,7 @@ public class MetaDataClient {
 
                         // TODO: support setting properties on other families?
                         if (column.getFamilyName() == null) {
-                            isAddingPKColumn = true;
+                            ++numPkColumnsAdded;
                             pkName = table.getPKName() == null ? null : table.getPKName().getString();
                             keySeq = ++nextKeySeq;
                         } else {
@@ -2088,15 +2136,26 @@ public class MetaDataClient {
                     }
 
                     // Add any new PK columns to end of index PK
-                    if (isAddingPKColumn) {
+                    if (numPkColumnsAdded>0) {
+                    	// create PK column list that includes the newly created columns
+                    	List<PColumn> pkColumns = Lists.newArrayListWithExpectedSize(table.getPKColumns().size()+numPkColumnsAdded);
+                    	pkColumns.addAll(table.getPKColumns());
+                    	for (int i=0; i<columnDefs.size(); ++i) {
+                    		if (columnDefs.get(i).isPK()) {
+                    			pkColumns.add(columns.get(i));
+                    		}
+                    	}
+                    	int pkSlotPosition = table.getPKColumns().size()-1;
                         for (PTable index : table.getIndexes()) {
                             short nextIndexKeySeq = SchemaUtil.getMaxKeySeq(index);
                             int indexPosition = index.getColumns().size();
-                            for (ColumnDef colDef : columnDefs) {
+                            for (int i=0; i<columnDefs.size(); ++i) {
+                            	ColumnDef colDef = columnDefs.get(i);
                                 if (colDef.isPK()) {
                                     PDataType indexColDataType = IndexUtil.getIndexColumnDataType(colDef.isNull(), colDef.getDataType());
                                     ColumnName indexColName = ColumnName.caseSensitiveColumnName(IndexUtil.getIndexColumnName(null, colDef.getColumnDefName().getColumnName()));
-                                    ColumnDef indexColDef = FACTORY.columnDef(indexColName, indexColDataType.getSqlTypeName(), colDef.isNull(), colDef.getMaxLength(), colDef.getScale(), true, colDef.getSortOrder());
+                                    Expression expression = new RowKeyColumnExpression(columns.get(i), new RowKeyValueAccessor(pkColumns, ++pkSlotPosition));
+                                    ColumnDef indexColDef = FACTORY.columnDef(indexColName, indexColDataType.getSqlTypeName(), colDef.isNull(), colDef.getMaxLength(), colDef.getScale(), true, colDef.getSortOrder(), expression.toString());
                                     PColumn indexColumn = newColumn(indexPosition++, indexColDef, PrimaryKeyConstraint.EMPTY, null, true);
                                     addColumnMutation(schemaName, index.getTableName().getString(), indexColumn, colUpsert, index.getParentTableName().getString(), index.getPKName() == null ? null : index.getPKName().getString(), ++nextIndexKeySeq, index.getBucketNum() != null);
                                 }
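
The loop above also keeps existing indexes in step when a new PK column is added to the data table: each index gets a trailing row-key column whose stored expression is the RowKeyColumnExpression of the new column. A hedged sketch of the statement that drives this path, assuming the ALTER TABLE ... ADD ... PRIMARY KEY form and reusing the illustrative EMP table from the sketch above:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class AddPkColumnSketch {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
                 Statement stmt = conn.createStatement()) {
                // Appends a nullable column to the end of the row key; the code above
                // then appends a matching PK column to every index on EMP.
                stmt.execute("ALTER TABLE EMP ADD REGION VARCHAR PRIMARY KEY");
            }
        }
    }
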
@@ -2124,7 +2183,7 @@ public class MetaDataClient {
                     }
                 }
 
-                if (isAddingPKColumn && !table.getIndexes().isEmpty()) {
+                if (numPkColumnsAdded>0 && !table.getIndexes().isEmpty()) {
                     for (PTable index : table.getIndexes()) {
                         incrementTableSeqNum(index, index.getType(), 1);
                     }
@@ -2172,7 +2231,7 @@ public class MetaDataClient {
 
                     // Only update client side cache if we aren't adding a PK column to a table with indexes.
                     // We could update the cache manually then too, it'd just be a pain.
-                    if (!isAddingPKColumn || table.getIndexes().isEmpty()) {
+                    if (numPkColumnsAdded==0 || table.getIndexes().isEmpty()) {
                         connection.addColumn(tenantId, SchemaUtil.getTableName(schemaName, tableName), columns, result.getMutationTime(), seqNum, isImmutableRows == null ? table.isImmutableRows() : isImmutableRows, disableWAL == null ? table.isWALDisabled() : disableWAL, multiTenant == null ? table.isMultiTenant() : multiTenant, storeNulls == null ? table.getStoreNulls() : storeNulls);
                     }
                     // Delete rows in view index if we haven't dropped it already

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
index 54eeaf0..fbc737c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumn.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.schema;
 
+
 /**
  * Definition of a Phoenix column
  *
@@ -50,4 +51,6 @@ public interface PColumn extends PDatum {
     boolean isViewReferenced();
     
     int getEstimatedSize();
+    
+    String getExpressionStr();
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
index 47963c2..11cc53d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PColumnImpl.java
@@ -37,6 +37,7 @@ public class PColumnImpl implements PColumn {
     private Integer arraySize;
     private byte[] viewConstant;
     private boolean isViewReferenced;
+    private String expressionStr;
 
     public PColumnImpl() {
     }
@@ -48,13 +49,13 @@ public class PColumnImpl implements PColumn {
                        Integer scale,
                        boolean nullable,
                        int position,
-                       SortOrder sortOrder, Integer arrSize, byte[] viewConstant, boolean isViewReferenced) {
-        init(name, familyName, dataType, maxLength, scale, nullable, position, sortOrder, arrSize, viewConstant, isViewReferenced);
+                       SortOrder sortOrder, Integer arrSize, byte[] viewConstant, boolean isViewReferenced, String expressionStr) {
+        init(name, familyName, dataType, maxLength, scale, nullable, position, sortOrder, arrSize, viewConstant, isViewReferenced, expressionStr);
     }
 
     public PColumnImpl(PColumn column, int position) {
         this(column.getName(), column.getFamilyName(), column.getDataType(), column.getMaxLength(),
-                column.getScale(), column.isNullable(), position, column.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced());
+                column.getScale(), column.isNullable(), position, column.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced(), column.getExpressionStr());
     }
 
     private void init(PName name,
@@ -66,7 +67,7 @@ public class PColumnImpl implements PColumn {
             int position,
             SortOrder sortOrder,
             Integer arrSize,
-            byte[] viewConstant, boolean isViewReferenced) {
+            byte[] viewConstant, boolean isViewReferenced, String expressionStr) {
     	Preconditions.checkNotNull(sortOrder);
         this.dataType = dataType;
         if (familyName == null) {
@@ -88,6 +89,7 @@ public class PColumnImpl implements PColumn {
         this.arraySize = arrSize;
         this.viewConstant = viewConstant;
         this.isViewReferenced = isViewReferenced;
+        this.expressionStr = expressionStr;
     }
 
     @Override
@@ -121,6 +123,11 @@ public class PColumnImpl implements PColumn {
     public Integer getScale() {
         return scale;
     }
+    
+    @Override
+    public String getExpressionStr() {
+        return expressionStr;
+    }
 
     @Override
     public boolean isNullable() {
@@ -221,9 +228,12 @@ public class PColumnImpl implements PColumn {
         if (column.hasViewReferenced()) {
             isViewReferenced = column.getViewReferenced();
         }
-
+        String expressionStr = null;
+        if (column.hasExpression()) {
+	        expressionStr = column.getExpression();
+        }
         return new PColumnImpl(columnName, familyName, dataType, maxLength, scale, nullable, position, sortOrder,
-                arraySize, viewConstant, isViewReferenced);
+                arraySize, viewConstant, isViewReferenced, expressionStr);
     }
 
     public static PTableProtos.PColumn toProto(PColumn column) {
@@ -249,6 +259,10 @@ public class PColumnImpl implements PColumn {
             builder.setViewConstant(HBaseZeroCopyByteString.wrap(column.getViewConstant()));
         }
         builder.setViewReferenced(column.isViewReferenced());
+        
+        if (column.getExpressionStr() != null) {
+            builder.setExpression(column.getExpressionStr());
+        }
         return builder.build();
     }
 }
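
For reference, the widened PColumnImpl constructor now carries the expression string alongside the existing column attributes, and toProto()/createFromProto() round-trip it. A small illustrative sketch; the column name and expression are made up:

    import org.apache.phoenix.schema.PColumn;
    import org.apache.phoenix.schema.PColumnImpl;
    import org.apache.phoenix.schema.PNameFactory;
    import org.apache.phoenix.schema.SortOrder;
    import org.apache.phoenix.schema.types.PVarchar;

    public class ExpressionColumnSketch {
        public static void main(String[] args) {
            PColumn indexCol = new PColumnImpl(
                    PNameFactory.newName("UPPER(LAST_NAME)"), // index column name (illustrative)
                    null,                                     // no family: a row-key column
                    PVarchar.INSTANCE,
                    null, null,                               // maxLength, scale
                    true,                                     // nullable
                    0,                                        // position
                    SortOrder.getDefault(),
                    null,                                     // arraySize
                    null,                                     // viewConstant
                    false,                                    // isViewReferenced
                    "UPPER(LAST_NAME)");                      // new: expression backing the column
            System.out.println(indexCol.getExpressionStr());
        }
    }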

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
index d3f4273..2f84c95 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PMetaDataImpl.java
@@ -393,7 +393,7 @@ public class PMetaDataImpl implements PMetaData {
             // Update position of columns that follow removed column
             for (int i = position+1; i < oldColumns.size(); i++) {
                 PColumn oldColumn = oldColumns.get(i);
-                PColumn newColumn = new PColumnImpl(oldColumn.getName(), oldColumn.getFamilyName(), oldColumn.getDataType(), oldColumn.getMaxLength(), oldColumn.getScale(), oldColumn.isNullable(), i-1+positionOffset, oldColumn.getSortOrder(), oldColumn.getArraySize(), oldColumn.getViewConstant(), oldColumn.isViewReferenced());
+                PColumn newColumn = new PColumnImpl(oldColumn.getName(), oldColumn.getFamilyName(), oldColumn.getDataType(), oldColumn.getMaxLength(), oldColumn.getScale(), oldColumn.isNullable(), i-1+positionOffset, oldColumn.getSortOrder(), oldColumn.getArraySize(), oldColumn.getViewConstant(), oldColumn.isViewReferenced(), oldColumn.getExpressionStr());
                 columns.add(newColumn);
             }
             

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
index ee4bebc..d0fea88 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTable.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.schema.stats.PTableStats;
 
 
@@ -208,7 +209,7 @@ public interface PTable {
      * @throws AmbiguousColumnException if multiple columns are found with the given name
      */
     PColumn getColumn(String name) throws ColumnNotFoundException, AmbiguousColumnException;
-
+    
     /**
      * Get the PK column with the given name.
      * @param name the column name
@@ -306,8 +307,8 @@ public interface PTable {
     PName getPhysicalName();
     boolean isImmutableRows();
 
-    void getIndexMaintainers(ImmutableBytesWritable ptr);
-    IndexMaintainer getIndexMaintainer(PTable dataTable);
+    void getIndexMaintainers(ImmutableBytesWritable ptr, PhoenixConnection connection);
+    IndexMaintainer getIndexMaintainer(PTable dataTable, PhoenixConnection connection);
     PName getDefaultFamilyName();
 
     boolean isWALDisabled();
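
Every call site of the two methods above now has to supply a PhoenixConnection; the maintainer needs it when the expression strings behind functional index columns are compiled (that rationale is an inference from this patch rather than something stated on the interface). A minimal caller-side sketch:

    import org.apache.phoenix.index.IndexMaintainer;
    import org.apache.phoenix.jdbc.PhoenixConnection;
    import org.apache.phoenix.schema.PTable;

    public class MaintainerLookup {
        // Before this patch, the connection argument was not part of the signature.
        static IndexMaintainer maintainerFor(PTable index, PTable dataTable, PhoenixConnection connection) {
            return index.getIndexMaintainer(dataTable, connection);
        }
    }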

http://git-wip-us.apache.org/repos/asf/phoenix/blob/8c340f5a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
index acce857..08f74b7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/PTableImpl.java
@@ -46,6 +46,7 @@ import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
+import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.protobuf.ProtobufUtil;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.RowKeySchema.RowKeySchemaBuilder;
@@ -324,15 +325,16 @@ public class PTableImpl implements PTable {
         this.tableStats = stats;
         List<PColumn> pkColumns;
         PColumn[] allColumns;
-
+        
         this.columnsByName = ArrayListMultimap.create(columns.size(), 1);
+        int numPKColumns = 0;
         if (bucketNum != null) {
             // Add salt column to allColumns and pkColumns, but don't add to
             // columnsByName, since it should not be addressable via name.
             allColumns = new PColumn[columns.size()+1];
             allColumns[SALTING_COLUMN.getPosition()] = SALTING_COLUMN;
             pkColumns = Lists.newArrayListWithExpectedSize(columns.size()+1);
-            pkColumns.add(SALTING_COLUMN);
+            ++numPKColumns;
         } else {
             allColumns = new PColumn[columns.size()];
             pkColumns = Lists.newArrayListWithExpectedSize(columns.size());
@@ -342,7 +344,7 @@ public class PTableImpl implements PTable {
             allColumns[column.getPosition()] = column;
             PName familyName = column.getFamilyName();
             if (familyName == null) {
-                pkColumns.add(column);
+                ++numPKColumns;
             }
             String columnName = column.getName().getString();
             if (columnsByName.put(columnName, column)) {
@@ -360,19 +362,21 @@ public class PTableImpl implements PTable {
         estimatedSize += SizedUtil.sizeOfMap(allColumns.length, SizedUtil.POINTER_SIZE, SizedUtil.sizeOfArrayList(1)); // for multi-map
 
         this.bucketNum = bucketNum;
-        this.pkColumns = ImmutableList.copyOf(pkColumns);
         this.allColumns = ImmutableList.copyOf(allColumns);
-        estimatedSize += SizedUtil.sizeOfMap(pkColumns.size()) + SizedUtil.sizeOfMap(allColumns.length);
+        estimatedSize += SizedUtil.sizeOfMap(numPKColumns) + SizedUtil.sizeOfMap(allColumns.length);
 
-        RowKeySchemaBuilder builder = new RowKeySchemaBuilder(pkColumns.size());
+        RowKeySchemaBuilder builder = new RowKeySchemaBuilder(numPKColumns);
         // Two pass so that column order in column families matches overall column order
         // and to ensure that column family order is constant
-        int maxExpectedSize = allColumns.length - pkColumns.size();
+        int maxExpectedSize = allColumns.length - numPKColumns;
         // Maintain iteration order so that column families are ordered as they are listed
         Map<PName, List<PColumn>> familyMap = Maps.newLinkedHashMap();
         for (PColumn column : allColumns) {
             PName familyName = column.getFamilyName();
             if (familyName == null) {
+            	 pkColumns.add(column);
+            }
+            if (familyName == null) {
                 estimatedSize += column.getEstimatedSize(); // PK columns
                 builder.addField(column, column.isNullable(), column.getSortOrder());
             } else {
@@ -384,6 +388,7 @@ public class PTableImpl implements PTable {
                 columnsInFamily.add(column);
             }
         }
+        this.pkColumns = ImmutableList.copyOf(pkColumns);
         this.rowKeySchema = builder.build();
         estimatedSize += rowKeySchema.getEstimatedSize();
         Iterator<Map.Entry<PName,List<PColumn>>> iterator = familyMap.entrySet().iterator();
@@ -804,21 +809,21 @@ public class PTableImpl implements PTable {
     }
 
     @Override
-    public synchronized IndexMaintainer getIndexMaintainer(PTable dataTable) {
+    public synchronized IndexMaintainer getIndexMaintainer(PTable dataTable, PhoenixConnection connection) {
         if (indexMaintainer == null) {
-            indexMaintainer = IndexMaintainer.create(dataTable, this);
+            indexMaintainer = IndexMaintainer.create(dataTable, this, connection);
         }
         return indexMaintainer;
     }
 
     @Override
-    public synchronized void getIndexMaintainers(ImmutableBytesWritable ptr) {
+    public synchronized void getIndexMaintainers(ImmutableBytesWritable ptr, PhoenixConnection connection) {
         if (indexMaintainersPtr == null) {
             indexMaintainersPtr = new ImmutableBytesWritable();
             if (indexes.isEmpty()) {
                 indexMaintainersPtr.set(ByteUtil.EMPTY_BYTE_ARRAY);
             } else {
-                IndexMaintainer.serialize(this, indexMaintainersPtr);
+                IndexMaintainer.serialize(this, indexMaintainersPtr, connection);
             }
         }
         ptr.set(indexMaintainersPtr.get(), indexMaintainersPtr.getOffset(), indexMaintainersPtr.getLength());