You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by ma...@apache.org on 2014/11/01 20:00:23 UTC

[1/5] PHOENIX-944 Support derived tables in FROM clause that need extra steps of client-side aggregation or other processing

Repository: phoenix
Updated Branches:
  refs/heads/4.0 77f24050c -> 91e29c60d


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
new file mode 100644
index 0000000..346a9fd
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.execute;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.phoenix.compile.ColumnProjector;
+import org.apache.phoenix.compile.JoinCompiler.ProjectedPTableWrapper;
+import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.ExpressionType;
+import org.apache.phoenix.schema.KeyValueSchema;
+import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.ValueBitSet;
+import org.apache.phoenix.schema.tuple.BaseTuple;
+import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.util.KeyValueUtil;
+import org.apache.phoenix.util.SchemaUtil;
+
+public class TupleProjector {    
+    public static final byte[] VALUE_COLUMN_FAMILY = Bytes.toBytes("_v");
+    public static final byte[] VALUE_COLUMN_QUALIFIER = new byte[0];
+    
+    private static final String SCAN_PROJECTOR = "scanProjector";
+    
+    private final KeyValueSchema schema;
+    private final Expression[] expressions;
+    private ValueBitSet valueSet;
+    private final ImmutableBytesWritable ptr = new ImmutableBytesWritable();
+    
+    public TupleProjector(RowProjector rowProjector) {
+        List<? extends ColumnProjector> columnProjectors = rowProjector.getColumnProjectors();
+        int count = columnProjectors.size();
+        KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
+        expressions = new Expression[count];
+        for (int i = 0; i < count; i++) {
+            Expression expression = columnProjectors.get(i).getExpression();
+            builder.addField(expression);
+            expressions[i] = expression;
+        }
+        schema = builder.build();
+        valueSet = ValueBitSet.newInstance(schema);
+    }
+    
+    public TupleProjector(ProjectedPTableWrapper projected) {
+    	List<PColumn> columns = projected.getTable().getColumns();
+    	expressions = new Expression[columns.size() - projected.getTable().getPKColumns().size()];
+    	// we do not count minNullableIndex for we might do later merge.
+    	KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
+    	int i = 0;
+        for (PColumn column : projected.getTable().getColumns()) {
+        	if (!SchemaUtil.isPKColumn(column)) {
+        		builder.addField(column);
+        		expressions[i++] = projected.getSourceExpression(column);
+        	}
+        }
+        schema = builder.build();
+        valueSet = ValueBitSet.newInstance(schema);
+    }
+    
+    public TupleProjector(KeyValueSchema schema, Expression[] expressions) {
+    	this.schema = schema;
+    	this.expressions = expressions;
+    	this.valueSet = ValueBitSet.newInstance(schema);
+    }
+    
+    public void setValueBitSet(ValueBitSet bitSet) {
+        this.valueSet = bitSet;
+    }
+    
+    public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector) {
+        ByteArrayOutputStream stream = new ByteArrayOutputStream();
+        try {
+            DataOutputStream output = new DataOutputStream(stream);
+            projector.schema.write(output);
+            int count = projector.expressions.length;
+            WritableUtils.writeVInt(output, count);
+            for (int i = 0; i < count; i++) {
+            	WritableUtils.writeVInt(output, ExpressionType.valueOf(projector.expressions[i]).ordinal());
+            	projector.expressions[i].write(output);
+            }
+            scan.setAttribute(SCAN_PROJECTOR, stream.toByteArray());
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                stream.close();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+        
+    }
+    
+    public static TupleProjector deserializeProjectorFromScan(Scan scan) {
+        byte[] proj = scan.getAttribute(SCAN_PROJECTOR);
+        if (proj == null) {
+            return null;
+        }
+        ByteArrayInputStream stream = new ByteArrayInputStream(proj);
+        try {
+            DataInputStream input = new DataInputStream(stream);
+            KeyValueSchema schema = new KeyValueSchema();
+            schema.readFields(input);
+            int count = WritableUtils.readVInt(input);
+            Expression[] expressions = new Expression[count];
+            for (int i = 0; i < count; i++) {
+            	int ordinal = WritableUtils.readVInt(input);
+            	expressions[i] = ExpressionType.values()[ordinal].newInstance();
+            	expressions[i].readFields(input);
+            }
+            return new TupleProjector(schema, expressions);
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } finally {
+            try {
+                stream.close();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+    
+    public static class ProjectedValueTuple extends BaseTuple {
+        private ImmutableBytesWritable keyPtr = new ImmutableBytesWritable();
+        private long timestamp;
+        private byte[] projectedValue;
+        private int bitSetLen;
+        private KeyValue keyValue;
+
+        private ProjectedValueTuple(byte[] keyBuffer, int keyOffset, int keyLength, long timestamp, byte[] projectedValue, int bitSetLen) {
+            this.keyPtr.set(keyBuffer, keyOffset, keyLength);
+            this.timestamp = timestamp;
+            this.projectedValue = projectedValue;
+            this.bitSetLen = bitSetLen;
+        }
+        
+        public ImmutableBytesWritable getKeyPtr() {
+            return keyPtr;
+        }
+        
+        public long getTimestamp() {
+            return timestamp;
+        }
+        
+        public byte[] getProjectedValue() {
+            return projectedValue;
+        }
+        
+        public int getBitSetLength() {
+            return bitSetLen;
+        }
+        
+        @Override
+        public void getKey(ImmutableBytesWritable ptr) {
+            ptr.set(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength());
+        }
+
+        @Override
+        public KeyValue getValue(int index) {
+            if (index != 0) {
+                throw new IndexOutOfBoundsException(Integer.toString(index));
+            }
+            return getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER);
+        }
+
+        @Override
+        public KeyValue getValue(byte[] family, byte[] qualifier) {
+            if (keyValue == null) {
+                keyValue = KeyValueUtil.newKeyValue(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), 
+                        VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp, projectedValue, 0, projectedValue.length);
+            }
+            return keyValue;
+        }
+
+        @Override
+        public boolean getValue(byte[] family, byte[] qualifier,
+                ImmutableBytesWritable ptr) {
+            ptr.set(projectedValue);
+            return true;
+        }
+
+        @Override
+        public boolean isImmutable() {
+            return true;
+        }
+
+        @Override
+        public int size() {
+            return 1;
+        }
+    }
+    
+    public ProjectedValueTuple projectResults(Tuple tuple) {
+    	byte[] bytesValue = schema.toBytes(tuple, getExpressions(), valueSet, ptr);
+    	Cell base = tuple.getValue(0);
+        return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), base.getTimestamp(), bytesValue, valueSet.getEstimatedLength());
+    }
+    
+    public static void decodeProjectedValue(Tuple tuple, ImmutableBytesWritable ptr) throws IOException {
+    	boolean b = tuple.getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, ptr);
+        if (!b)
+            throw new IOException("Trying to decode a non-projected value.");
+    }
+    
+    public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, KeyValueSchema destSchema, ValueBitSet destBitSet,
+    		Tuple src, KeyValueSchema srcSchema, ValueBitSet srcBitSet, int offset) throws IOException {
+    	ImmutableBytesWritable destValue = new ImmutableBytesWritable(dest.getProjectedValue());
+    	destBitSet.clear();
+    	destBitSet.or(destValue);
+    	int origDestBitSetLen = dest.getBitSetLength();
+    	ImmutableBytesWritable srcValue = new ImmutableBytesWritable();
+    	decodeProjectedValue(src, srcValue);
+    	srcBitSet.clear();
+    	srcBitSet.or(srcValue);
+    	int origSrcBitSetLen = srcBitSet.getEstimatedLength();
+    	for (int i = 0; i < srcBitSet.getMaxSetBit(); i++) {
+    		if (srcBitSet.get(i)) {
+    			destBitSet.set(offset + i);
+    		}
+    	}
+    	int destBitSetLen = destBitSet.getEstimatedLength();
+    	byte[] merged = new byte[destValue.getLength() - origDestBitSetLen + srcValue.getLength() - origSrcBitSetLen + destBitSetLen];
+    	int o = Bytes.putBytes(merged, 0, destValue.get(), destValue.getOffset(), destValue.getLength() - origDestBitSetLen);
+    	o = Bytes.putBytes(merged, o, srcValue.get(), srcValue.getOffset(), srcValue.getLength() - origSrcBitSetLen);
+    	destBitSet.toBytes(merged, o);
+    	ImmutableBytesWritable keyPtr = dest.getKeyPtr();
+        return new ProjectedValueTuple(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), dest.getTimestamp(), merged, destBitSetLen);
+    }
+
+    public KeyValueSchema getSchema() {
+        return schema;
+    }
+
+    public Expression[] getExpressions() {
+        return expressions;
+    }
+
+    public ValueBitSet getValueBitSet() {
+        return valueSet;
+    }
+    
+    @Override
+    public String toString() {
+        return "TUPLE-PROJECTOR {" + Arrays.toString(expressions) + " ==> " + schema.toString() + "}";
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java
index 1c4232a..8a2f6d1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/ProjectedColumnExpression.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.util.Collection;
 
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.visitor.ExpressionVisitor;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.schema.KeyValueSchema;
 import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
 import org.apache.phoenix.schema.PColumn;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/expression/aggregator/DistinctValueClientAggregator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/expression/aggregator/DistinctValueClientAggregator.java b/phoenix-core/src/main/java/org/apache/phoenix/expression/aggregator/DistinctValueClientAggregator.java
index fefb077..2af99ca 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/expression/aggregator/DistinctValueClientAggregator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/expression/aggregator/DistinctValueClientAggregator.java
@@ -21,7 +21,6 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.schema.PArrayDataType;
 import org.apache.phoenix.schema.PDataType;
-import org.apache.phoenix.schema.PhoenixArray;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.tuple.Tuple;
 
@@ -37,15 +36,15 @@ public class DistinctValueClientAggregator extends DistinctValueWithCountClientA
 
     @Override
     public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
-        if (buffer == null || buffer.length == 0) {            
+        if (cachedResult == null) {            
             Object[] values = new Object[valueVsCount.size()];
             int i = 0;
             for (ImmutableBytesPtr key : valueVsCount.keySet()) {
                 values[i++] = valueType.toObject(key, sortOrder);
             }
-            PhoenixArray array = PArrayDataType.instantiatePhoenixArray(valueType, values);
-            buffer = resultType.toBytes(array, sortOrder);
+            cachedResult = PArrayDataType.instantiatePhoenixArray(valueType, values);
         }
+        buffer = resultType.toBytes(cachedResult, sortOrder);
         ptr.set(buffer);
         return true;
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseGroupedAggregatingResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseGroupedAggregatingResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseGroupedAggregatingResultIterator.java
new file mode 100644
index 0000000..8fd36b3
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/BaseGroupedAggregatingResultIterator.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.iterate;
+
+import static org.apache.phoenix.query.QueryConstants.AGG_TIMESTAMP;
+import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN;
+import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_FAMILY;
+
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.expression.aggregator.Aggregator;
+import org.apache.phoenix.expression.aggregator.Aggregators;
+import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.util.KeyValueUtil;
+
+/**
+ * 
+ * Base class for result scanners that aggregate the row count value for rows with 
+ * duplicate keys. This result scanner assumes that the results of the inner result 
+ * scanner are returned in order of grouping keys.
+ * 
+ */
+public abstract class BaseGroupedAggregatingResultIterator implements
+        AggregatingResultIterator {
+    private static final byte[] UNITIALIZED_KEY_BUFFER = new byte[0];
+    protected final PeekingResultIterator resultIterator;
+    protected final Aggregators aggregators;
+    private ImmutableBytesWritable currentKey;
+    private ImmutableBytesWritable nextKey;    
+
+    public BaseGroupedAggregatingResultIterator(
+            PeekingResultIterator resultIterator, Aggregators aggregators) {
+        if (resultIterator == null) throw new NullPointerException();
+        if (aggregators == null) throw new NullPointerException();
+        this.resultIterator = resultIterator;
+        this.aggregators = aggregators;
+        this.currentKey = new ImmutableBytesWritable(UNITIALIZED_KEY_BUFFER);
+        this.nextKey = new ImmutableBytesWritable(UNITIALIZED_KEY_BUFFER);        
+    }
+    
+    protected abstract ImmutableBytesWritable getGroupingKey(Tuple tuple, ImmutableBytesWritable ptr) throws SQLException;
+    protected abstract Tuple wrapKeyValueAsResult(KeyValue keyValue) throws SQLException;
+
+    @Override
+    public Tuple next() throws SQLException {
+        Tuple result = resultIterator.next();
+        if (result == null) {
+            return null;
+        }
+        if (currentKey.get() == UNITIALIZED_KEY_BUFFER) {
+            getGroupingKey(result, currentKey);
+        }
+        Aggregator[] rowAggregators = aggregators.getAggregators();
+        aggregators.reset(rowAggregators);
+        while (true) {
+            aggregators.aggregate(rowAggregators, result);
+            Tuple nextResult = resultIterator.peek();
+            if (nextResult == null || !currentKey.equals(getGroupingKey(nextResult, nextKey))) {
+                break;
+            }
+            result = resultIterator.next();
+        }
+        
+        byte[] value = aggregators.toBytes(rowAggregators);
+        Tuple tuple = wrapKeyValueAsResult(KeyValueUtil.newKeyValue(currentKey, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length));
+        currentKey.set(nextKey.get(), nextKey.getOffset(), nextKey.getLength());
+        return tuple;
+    }
+    
+    @Override
+    public void close() throws SQLException {
+        resultIterator.close();
+    }
+    
+    @Override
+    public void aggregate(Tuple result) {
+        Aggregator[] rowAggregators = aggregators.getAggregators();
+        aggregators.reset(rowAggregators);
+        aggregators.aggregate(rowAggregators, result);
+    }
+
+    @Override
+    public void explain(List<String> planSteps) {
+        resultIterator.explain(planSteps);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/iterate/GroupedAggregatingResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/GroupedAggregatingResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/GroupedAggregatingResultIterator.java
index bb9bf50..1cf9b73 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/GroupedAggregatingResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/GroupedAggregatingResultIterator.java
@@ -17,19 +17,13 @@
  */
 package org.apache.phoenix.iterate;
 
-import static org.apache.phoenix.query.QueryConstants.*;
-
 import java.sql.SQLException;
-import java.util.List;
 
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-
-import org.apache.phoenix.expression.aggregator.Aggregator;
 import org.apache.phoenix.expression.aggregator.Aggregators;
 import org.apache.phoenix.schema.tuple.SingleKeyValueTuple;
 import org.apache.phoenix.schema.tuple.Tuple;
-import org.apache.phoenix.util.KeyValueUtil;
-import org.apache.phoenix.util.TupleUtil;
 
 
 
@@ -51,61 +45,26 @@ import org.apache.phoenix.util.TupleUtil;
  * 
  * @since 0.1
  */
-public class GroupedAggregatingResultIterator implements AggregatingResultIterator {
-    private final ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
-    private final PeekingResultIterator resultIterator;
-    protected final Aggregators aggregators;
-    
-    public GroupedAggregatingResultIterator( PeekingResultIterator resultIterator, Aggregators aggregators) {
-        if (resultIterator == null) throw new NullPointerException();
-        if (aggregators == null) throw new NullPointerException();
-        this.resultIterator = resultIterator;
-        this.aggregators = aggregators;
-    }
-    
-    @Override
-    public Tuple next() throws SQLException {
-        Tuple result = resultIterator.next();
-        if (result == null) {
-            return null;
-        }
-        Aggregator[] rowAggregators = aggregators.getAggregators();
-        aggregators.reset(rowAggregators);
-        while (true) {
-            aggregators.aggregate(rowAggregators, result);
-            Tuple nextResult = resultIterator.peek();
-            if (nextResult == null || !TupleUtil.equals(result, nextResult, tempPtr)) {
-                break;
-            }
-            result = resultIterator.next();
-        }
-        
-        byte[] value = aggregators.toBytes(rowAggregators);
-        result.getKey(tempPtr);
-        return new SingleKeyValueTuple(KeyValueUtil.newKeyValue(tempPtr, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length));
-    }
-    
-    @Override
-    public void close() throws SQLException {
-        resultIterator.close();
+public class GroupedAggregatingResultIterator extends BaseGroupedAggregatingResultIterator {
+
+    public GroupedAggregatingResultIterator(PeekingResultIterator resultIterator, Aggregators aggregators) {
+        super(resultIterator, aggregators);
     }
-    
+
     @Override
-    public void aggregate(Tuple result) {
-        Aggregator[] rowAggregators = aggregators.getAggregators();
-        aggregators.reset(rowAggregators);
-        aggregators.aggregate(rowAggregators, result);
+    protected ImmutableBytesWritable getGroupingKey(Tuple tuple, ImmutableBytesWritable ptr) throws SQLException {
+        tuple.getKey(ptr);
+        return ptr;
     }
 
     @Override
-    public void explain(List<String> planSteps) {
-        resultIterator.explain(planSteps);
+    protected Tuple wrapKeyValueAsResult(KeyValue keyValue) throws SQLException {
+        return new SingleKeyValueTuple(keyValue);
     }
 
 	@Override
 	public String toString() {
-		return "GroupedAggregatingResultIterator [tempPtr=" + tempPtr
-				+ ", resultIterator=" + resultIterator + ", aggregators="
-				+ aggregators + "]";
+		return "GroupedAggregatingResultIterator [resultIterator=" 
+		        + resultIterator + ", aggregators=" + aggregators + "]";
 	}
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
index a7f390f..971b1a5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
@@ -26,6 +26,10 @@ import org.apache.phoenix.schema.tuple.Tuple;
 
 abstract public class LookAheadResultIterator implements PeekingResultIterator {
     public static LookAheadResultIterator wrap(final ResultIterator iterator) {
+        if (iterator instanceof LookAheadResultIterator) {
+            return (LookAheadResultIterator) iterator;
+        }
+        
         return new LookAheadResultIterator() {
 
             @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/join/TupleProjector.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/join/TupleProjector.java b/phoenix-core/src/main/java/org/apache/phoenix/join/TupleProjector.java
deleted file mode 100644
index e0d9336..0000000
--- a/phoenix-core/src/main/java/org/apache/phoenix/join/TupleProjector.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.join;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.phoenix.compile.JoinCompiler.ProjectedPTableWrapper;
-import org.apache.phoenix.expression.Expression;
-import org.apache.phoenix.expression.ExpressionType;
-import org.apache.phoenix.schema.KeyValueSchema;
-import org.apache.phoenix.schema.KeyValueSchema.KeyValueSchemaBuilder;
-import org.apache.phoenix.schema.PColumn;
-import org.apache.phoenix.schema.ValueBitSet;
-import org.apache.phoenix.schema.tuple.BaseTuple;
-import org.apache.phoenix.schema.tuple.Tuple;
-import org.apache.phoenix.util.KeyValueUtil;
-import org.apache.phoenix.util.SchemaUtil;
-
-public class TupleProjector {    
-    public static final byte[] VALUE_COLUMN_FAMILY = Bytes.toBytes("_v");
-    public static final byte[] VALUE_COLUMN_QUALIFIER = new byte[0];
-    
-    private static final String SCAN_PROJECTOR = "scanProjector";
-    
-    private final KeyValueSchema schema;
-    private final Expression[] expressions;
-    private ValueBitSet valueSet;
-    private final ImmutableBytesWritable ptr = new ImmutableBytesWritable();
-    
-    public TupleProjector(ProjectedPTableWrapper projected) {
-    	List<PColumn> columns = projected.getTable().getColumns();
-    	expressions = new Expression[columns.size() - projected.getTable().getPKColumns().size()];
-    	// we do not count minNullableIndex for we might do later merge.
-    	KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
-    	int i = 0;
-        for (PColumn column : projected.getTable().getColumns()) {
-        	if (!SchemaUtil.isPKColumn(column)) {
-        		builder.addField(column);
-        		expressions[i++] = projected.getSourceExpression(column);
-        	}
-        }
-        schema = builder.build();
-        valueSet = ValueBitSet.newInstance(schema);
-    }
-    
-    public TupleProjector(KeyValueSchema schema, Expression[] expressions) {
-    	this.schema = schema;
-    	this.expressions = expressions;
-    	this.valueSet = ValueBitSet.newInstance(schema);
-    }
-    
-    public void setValueBitSet(ValueBitSet bitSet) {
-        this.valueSet = bitSet;
-    }
-    
-    public static void serializeProjectorIntoScan(Scan scan, TupleProjector projector) {
-        ByteArrayOutputStream stream = new ByteArrayOutputStream();
-        try {
-            DataOutputStream output = new DataOutputStream(stream);
-            projector.schema.write(output);
-            int count = projector.expressions.length;
-            WritableUtils.writeVInt(output, count);
-            for (int i = 0; i < count; i++) {
-            	WritableUtils.writeVInt(output, ExpressionType.valueOf(projector.expressions[i]).ordinal());
-            	projector.expressions[i].write(output);
-            }
-            scan.setAttribute(SCAN_PROJECTOR, stream.toByteArray());
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        } finally {
-            try {
-                stream.close();
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-        
-    }
-    
-    public static TupleProjector deserializeProjectorFromScan(Scan scan) {
-        byte[] proj = scan.getAttribute(SCAN_PROJECTOR);
-        if (proj == null) {
-            return null;
-        }
-        ByteArrayInputStream stream = new ByteArrayInputStream(proj);
-        try {
-            DataInputStream input = new DataInputStream(stream);
-            KeyValueSchema schema = new KeyValueSchema();
-            schema.readFields(input);
-            int count = WritableUtils.readVInt(input);
-            Expression[] expressions = new Expression[count];
-            for (int i = 0; i < count; i++) {
-            	int ordinal = WritableUtils.readVInt(input);
-            	expressions[i] = ExpressionType.values()[ordinal].newInstance();
-            	expressions[i].readFields(input);
-            }
-            return new TupleProjector(schema, expressions);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        } finally {
-            try {
-                stream.close();
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-        }
-    }
-    
-    public static class ProjectedValueTuple extends BaseTuple {
-        private ImmutableBytesWritable keyPtr = new ImmutableBytesWritable();
-        private long timestamp;
-        private byte[] projectedValue;
-        private int bitSetLen;
-        private KeyValue keyValue;
-
-        private ProjectedValueTuple(byte[] keyBuffer, int keyOffset, int keyLength, long timestamp, byte[] projectedValue, int bitSetLen) {
-            this.keyPtr.set(keyBuffer, keyOffset, keyLength);
-            this.timestamp = timestamp;
-            this.projectedValue = projectedValue;
-            this.bitSetLen = bitSetLen;
-        }
-        
-        public ImmutableBytesWritable getKeyPtr() {
-            return keyPtr;
-        }
-        
-        public long getTimestamp() {
-            return timestamp;
-        }
-        
-        public byte[] getProjectedValue() {
-            return projectedValue;
-        }
-        
-        public int getBitSetLength() {
-            return bitSetLen;
-        }
-        
-        @Override
-        public void getKey(ImmutableBytesWritable ptr) {
-            ptr.set(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength());
-        }
-
-        @Override
-        public KeyValue getValue(int index) {
-            if (index != 0) {
-                throw new IndexOutOfBoundsException(Integer.toString(index));
-            }
-            return getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER);
-        }
-
-        @Override
-        public KeyValue getValue(byte[] family, byte[] qualifier) {
-            if (keyValue == null) {
-                keyValue = KeyValueUtil.newKeyValue(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), 
-                        VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp, projectedValue, 0, projectedValue.length);
-            }
-            return keyValue;
-        }
-
-        @Override
-        public boolean getValue(byte[] family, byte[] qualifier,
-                ImmutableBytesWritable ptr) {
-            ptr.set(projectedValue);
-            return true;
-        }
-
-        @Override
-        public boolean isImmutable() {
-            return true;
-        }
-
-        @Override
-        public int size() {
-            return 1;
-        }
-    }
-    
-    public ProjectedValueTuple projectResults(Tuple tuple) {
-    	byte[] bytesValue = schema.toBytes(tuple, getExpressions(), valueSet, ptr);
-    	Cell base = tuple.getValue(0);
-        return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), base.getTimestamp(), bytesValue, valueSet.getEstimatedLength());
-    }
-    
-    public static void decodeProjectedValue(Tuple tuple, ImmutableBytesWritable ptr) throws IOException {
-    	boolean b = tuple.getValue(VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, ptr);
-        if (!b)
-            throw new IOException("Trying to decode a non-projected value.");
-    }
-    
-    public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, KeyValueSchema destSchema, ValueBitSet destBitSet,
-    		Tuple src, KeyValueSchema srcSchema, ValueBitSet srcBitSet, int offset) throws IOException {
-    	ImmutableBytesWritable destValue = new ImmutableBytesWritable(dest.getProjectedValue());
-    	destBitSet.clear();
-    	destBitSet.or(destValue);
-    	int origDestBitSetLen = dest.getBitSetLength();
-    	ImmutableBytesWritable srcValue = new ImmutableBytesWritable();
-    	decodeProjectedValue(src, srcValue);
-    	srcBitSet.clear();
-    	srcBitSet.or(srcValue);
-    	int origSrcBitSetLen = srcBitSet.getEstimatedLength();
-    	for (int i = 0; i < srcBitSet.getMaxSetBit(); i++) {
-    		if (srcBitSet.get(i)) {
-    			destBitSet.set(offset + i);
-    		}
-    	}
-    	int destBitSetLen = destBitSet.getEstimatedLength();
-    	byte[] merged = new byte[destValue.getLength() - origDestBitSetLen + srcValue.getLength() - origSrcBitSetLen + destBitSetLen];
-    	int o = Bytes.putBytes(merged, 0, destValue.get(), destValue.getOffset(), destValue.getLength() - origDestBitSetLen);
-    	o = Bytes.putBytes(merged, o, srcValue.get(), srcValue.getOffset(), srcValue.getLength() - origSrcBitSetLen);
-    	destBitSet.toBytes(merged, o);
-    	ImmutableBytesWritable keyPtr = dest.getKeyPtr();
-        return new ProjectedValueTuple(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), dest.getTimestamp(), merged, destBitSetLen);
-    }
-
-    public KeyValueSchema getSchema() {
-        return schema;
-    }
-
-    public Expression[] getExpressions() {
-        return expressions;
-    }
-
-    public ValueBitSet getValueBitSet() {
-        return valueSet;
-    }
-    
-    @Override
-    public String toString() {
-        return "TUPLE-PROJECTOR {" + Arrays.toString(expressions) + " ==> " + schema.toString() + "}";
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
index 6a68df3..f027ab3 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
@@ -102,6 +102,7 @@ public class QueryOptimizer {
         if (!useIndexes 
                 || select.isJoin() 
                 || dataPlan.getContext().getResolver().getTables().size() > 1
+                || select.getInnerSelectStatement() != null
                 || (dataPlan.getContext().getScanRanges().isPointLookup() && stopAtBestPlan)) {
             return Collections.singletonList(dataPlan);
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
index 4f5e7ae..6f8339e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
@@ -630,8 +630,8 @@ public class ParseNodeFactory {
                 statement.hasSequence());
     }
 
-    public SelectStatement select(SelectStatement statement, List<AliasedNode> select, ParseNode where, List<ParseNode> groupBy, boolean isAggregate) {
-        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), select, where, groupBy,
+    public SelectStatement select(SelectStatement statement, boolean isDistinct, List<AliasedNode> select, ParseNode where, List<ParseNode> groupBy, boolean isAggregate) {
+        return select(statement.getFrom(), statement.getHint(), isDistinct, select, where, groupBy,
                 statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), isAggregate,
                 statement.hasSequence());
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
index 6cee588..e7302dc 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
@@ -192,4 +192,11 @@ public class SelectStatement implements FilterableStatement {
     public boolean isJoin() {
         return fromTable.size() > 1 || (fromTable.size() > 0 && fromTable.get(0) instanceof JoinTableNode);
     }
+    
+    public SelectStatement getInnerSelectStatement() {
+        if (fromTable.size() != 1 || !(fromTable.get(0) instanceof DerivedTableNode))
+            return null;
+        
+        return ((DerivedTableNode) fromTable.get(0)).getSelect();
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/schema/ColumnRef.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/schema/ColumnRef.java b/phoenix-core/src/main/java/org/apache/phoenix/schema/ColumnRef.java
index 7a00082..d5ccc5c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/schema/ColumnRef.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/schema/ColumnRef.java
@@ -123,7 +123,7 @@ public class ColumnRef {
         // use ProjectedColumnExpression, but not sure. The column values from the data
         // table should get returned in a single KeyValue in a similar format (using a
         // KeyValueSchema).
-        if (table.getType() == PTableType.JOIN) {
+        if (table.getType() == PTableType.JOIN || table.getType() == PTableType.SUBQUERY) {
         	return new ProjectedColumnExpression(column, table, column.getName().getString());
         }
        

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
index 9aa1b83..85b5ac5 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/IndexUtil.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.KeyValueColumnExpression;
 import org.apache.phoenix.expression.RowKeyColumnExpression;
@@ -51,7 +52,6 @@ import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.hbase.index.util.KeyValueBuilder;
 import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.jdbc.PhoenixConnection;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.ColumnFamilyNotFoundException;
 import org.apache.phoenix.schema.ColumnNotFoundException;


[3/5] git commit: PHOENIX-944 Support derived tables in FROM clause that needs extra steps of client-side aggregation or other processing

Posted by ma...@apache.org.
PHOENIX-944 Support derived tables in FROM clause that needs extra steps of client-side aggregation or other processing


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/301f87ea
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/301f87ea
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/301f87ea

Branch: refs/heads/4.0
Commit: 301f87ea2d3f2f8749010592f225f71924ab3c16
Parents: f004e13
Author: maryannxue <ma...@apache.org>
Authored: Thu Oct 23 20:26:17 2014 -0400
Committer: maryannxue <ma...@apache.org>
Committed: Thu Oct 23 23:26:49 2014 -0400

----------------------------------------------------------------------
 .../main/java/org/apache/phoenix/compile/QueryCompiler.java    | 4 ++++
 .../java/org/apache/phoenix/execute/ClientAggregatePlan.java   | 4 +++-
 .../org/apache/phoenix/iterate/LookAheadResultIterator.java    | 6 +++---
 3 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/301f87ea/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index 0eafcdb..214330c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -43,6 +43,7 @@ import org.apache.phoenix.execute.ScanPlan;
 import org.apache.phoenix.execute.TupleProjectionPlan;
 import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.RowValueConstructorExpression;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.iterate.ParallelIterators.ParallelIteratorFactory;
@@ -427,6 +428,9 @@ public class QueryCompiler {
         }
         
         if (innerPlan != null) {
+            if (LiteralExpression.isTrue(where)) {
+                where = null; // we do not pass "true" as filter
+            }
             plan =  select.isAggregate() || select.isDistinct() ?
                       new ClientAggregatePlan(context, select, tableRef, projector, limit, where, orderBy, groupBy, having, plan)
                     : new ClientScanPlan(context, select, tableRef, projector, limit, where, orderBy, plan);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/301f87ea/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
index a9347e1..59aab2d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
@@ -17,6 +17,8 @@
  */
 package org.apache.phoenix.execute;
 
+import static org.apache.phoenix.query.QueryConstants.*;
+
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.Collections;
@@ -210,7 +212,7 @@ public class ClientAggregatePlan extends ClientProcessingPlan {
         @Override
         protected ImmutableBytesWritable getGroupingKey(Tuple tuple,
                 ImmutableBytesWritable ptr) throws SQLException {
-            tuple.getKey(ptr);
+            ptr.set(UNGROUPED_AGG_ROW_KEY);
             return ptr;
         }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/301f87ea/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
index 971b1a5..3293f65 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/LookAheadResultIterator.java
@@ -25,9 +25,9 @@ import org.apache.phoenix.schema.tuple.Tuple;
 
 
 abstract public class LookAheadResultIterator implements PeekingResultIterator {
-    public static LookAheadResultIterator wrap(final ResultIterator iterator) {
-        if (iterator instanceof LookAheadResultIterator) {
-            return (LookAheadResultIterator) iterator;
+    public static PeekingResultIterator wrap(final ResultIterator iterator) {
+        if (iterator instanceof PeekingResultIterator) {
+            return (PeekingResultIterator) iterator;
         }
         
         return new LookAheadResultIterator() {


[4/5] git commit: Merge branch '4.0' of https://git-wip-us.apache.org/repos/asf/phoenix into 4.0

Posted by ma...@apache.org.
Merge branch '4.0' of https://git-wip-us.apache.org/repos/asf/phoenix into 4.0


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/e1377116
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/e1377116
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/e1377116

Branch: refs/heads/4.0
Commit: e13771165b9cf445a932e83d955055be24edcc5d
Parents: 301f87e 77f2405
Author: maryannxue <ma...@apache.org>
Authored: Sat Nov 1 14:49:43 2014 -0400
Committer: maryannxue <ma...@apache.org>
Committed: Sat Nov 1 14:49:43 2014 -0400

----------------------------------------------------------------------
 CHANGES                                         |  14 +
 bin/phoenix_utils.py                            |   2 +-
 bin/psql.py                                     |   8 +-
 phoenix-assembly/pom.xml                        |  10 +-
 phoenix-core/pom.xml                            |  23 +-
 .../apache/phoenix/end2end/AlterTableIT.java    |  22 +-
 .../end2end/BaseClientManagedTimeIT.java        |  15 +-
 .../org/apache/phoenix/end2end/BaseQueryIT.java |   3 +-
 .../end2end/ClientTimeArithmeticQueryIT.java    |  43 +++
 .../phoenix/end2end/InMemoryOrderByIT.java      |   4 +-
 .../org/apache/phoenix/end2end/QueryIT.java     |  44 ++-
 .../apache/phoenix/end2end/ReverseScanIT.java   |   2 +-
 .../org/apache/phoenix/end2end/SequenceIT.java  |   7 +-
 .../end2end/SkipScanAfterManualSplitIT.java     |   2 +-
 .../phoenix/end2end/SpooledOrderByIT.java       |   4 +-
 .../phoenix/end2end/StatsCollectorIT.java       |  55 ++-
 .../end2end/TenantSpecificTablesDDLIT.java      |  23 +-
 .../apache/phoenix/end2end/UpsertSelectIT.java  |   4 +-
 .../phoenix/end2end/index/LocalIndexIT.java     | 176 +++++++++-
 .../phoenix/end2end/index/MutableIndexIT.java   |  15 +-
 .../apache/phoenix/trace/BaseTracingTestIT.java | 100 +++---
 .../phoenix/trace/DisableableMetricsWriter.java |  27 +-
 .../trace/Hadoop1TracingTestEnabler.java        |  84 -----
 .../apache/phoenix/trace/PhoenixMetricImpl.java |  44 ---
 .../phoenix/trace/PhoenixMetricRecordImpl.java  |  71 ----
 .../trace/PhoenixTableMetricsWriterIT.java      |  28 +-
 .../apache/phoenix/trace/PhoenixTagImpl.java    |  29 +-
 .../phoenix/trace/PhoenixTraceReaderIT.java     |  61 ++--
 .../phoenix/trace/PhoenixTracingEndToEndIT.java |  59 ++--
 .../apache/phoenix/trace/TracingTestUtil.java   |  31 ++
 .../IndexHalfStoreFileReaderGenerator.java      |  63 ++++
 .../hbase/regionserver/LocalIndexSplitter.java  |  40 +++
 .../phoenix/compile/ExpressionCompiler.java     |   2 +-
 .../phoenix/compile/ProjectionCompiler.java     |  50 ++-
 .../coprocessor/MetaDataEndpointImpl.java       |  43 +--
 .../phoenix/coprocessor/MetaDataProtocol.java   |   2 +-
 .../UngroupedAggregateRegionObserver.java       |  21 +-
 .../coprocessor/generated/MetaDataProtos.java   | 351 +++++++++----------
 .../coprocessor/generated/PTableProtos.java     | 103 +++++-
 .../apache/phoenix/execute/BaseQueryPlan.java   |   6 +
 .../phoenix/filter/BooleanExpressionFilter.java |   6 +
 .../org/apache/phoenix/hbase/index/Indexer.java |   4 +-
 .../hbase/index/wal/IndexedKeyValue.java        |  46 ++-
 .../TrackingParallelWriterIndexCommitter.java   |   2 +-
 .../apache/phoenix/iterate/ExplainTable.java    |  17 +-
 .../phoenix/iterate/ParallelIterators.java      |   2 +-
 .../apache/phoenix/jdbc/PhoenixConnection.java  |   8 +-
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |  55 ++-
 .../apache/phoenix/jdbc/PhoenixResultSet.java   |   5 -
 .../org/apache/phoenix/metrics/MetricInfo.java  |  51 +++
 .../org/apache/phoenix/metrics/Metrics.java     |  66 ++++
 .../java/org/apache/phoenix/parse/HintNode.java |   9 +-
 .../phoenix/query/ConnectionQueryServices.java  |   2 +-
 .../query/ConnectionQueryServicesImpl.java      |  20 +-
 .../query/ConnectionlessQueryServicesImpl.java  |   8 +-
 .../query/DelegateConnectionQueryServices.java  |  10 +-
 .../apache/phoenix/query/MetaDataMutated.java   |   2 +-
 .../org/apache/phoenix/query/QueryServices.java |   1 +
 .../phoenix/query/QueryServicesOptions.java     |   5 +-
 .../apache/phoenix/schema/MetaDataClient.java   |  12 +-
 .../apache/phoenix/schema/PMetaDataImpl.java    |  53 +--
 .../org/apache/phoenix/schema/PTableImpl.java   |   5 +-
 .../phoenix/schema/stats/PTableStats.java       |   7 +
 .../phoenix/schema/stats/PTableStatsImpl.java   |  12 +-
 .../schema/stats/StatisticsCollector.java       |  79 +++--
 .../phoenix/schema/stats/StatisticsScanner.java |   1 -
 .../phoenix/schema/stats/StatisticsUtil.java    |   6 +-
 .../phoenix/schema/stats/StatisticsWriter.java  |  39 ++-
 .../phoenix/schema/tuple/ResultTuple.java       |   2 +-
 .../apache/phoenix/trace/MetricsInfoImpl.java   |  63 ++++
 .../phoenix/trace/PhoenixMetricsSink.java       | 298 ++++++++++++++++
 .../trace/PhoenixTableMetricsWriter.java        | 278 ---------------
 .../apache/phoenix/trace/TraceMetricSource.java | 188 ++++++++++
 .../org/apache/phoenix/trace/TraceReader.java   |  12 +-
 .../org/apache/phoenix/trace/TracingUtils.java  |  58 +++
 .../org/apache/phoenix/trace/util/Tracing.java  |   5 +-
 .../metrics2/impl/ExposedMetricCounterLong.java |  36 ++
 .../metrics2/impl/ExposedMetricsRecordImpl.java |  42 +++
 .../metrics2/lib/ExposedMetricsInfoImpl.java    |  34 ++
 .../org/apache/phoenix/metrics/LoggingSink.java |  60 ++++
 .../query/BaseConnectionlessQueryTest.java      |   2 +
 .../java/org/apache/phoenix/query/BaseTest.java |   1 +
 .../org/apache/phoenix/query/QueryPlanTest.java |   8 +
 .../phoenix/trace/TraceMetricsSourceTest.java   |  96 +++++
 phoenix-hadoop-compat/pom.xml                   |  89 -----
 .../org/apache/phoenix/metrics/MetricInfo.java  |  51 ---
 .../org/apache/phoenix/metrics/Metrics.java     |  80 -----
 .../apache/phoenix/metrics/MetricsManager.java  |  58 ---
 .../apache/phoenix/metrics/MetricsWriter.java   |  31 --
 .../phoenix/metrics/PhoenixAbstractMetric.java  |  30 --
 .../phoenix/metrics/PhoenixMetricTag.java       |  27 --
 .../phoenix/metrics/PhoenixMetricsRecord.java   |  35 --
 .../phoenix/trace/PhoenixSpanReceiver.java      |  26 --
 .../phoenix/trace/TestableMetricsWriter.java    |  30 --
 .../org/apache/phoenix/trace/TracingCompat.java |  89 -----
 .../org/apache/phoenix/metrics/LoggingSink.java |  56 ---
 .../phoenix/metrics/TracingTestCompat.java      |  45 ---
 phoenix-hadoop2-compat/pom.xml                  |  77 ----
 .../phoenix/metrics/MetricsManagerImpl.java     |  71 ----
 .../apache/phoenix/trace/MetricsInfoImpl.java   |  63 ----
 .../phoenix/trace/PhoenixMetricsSink.java       | 191 ----------
 .../apache/phoenix/trace/TraceMetricSource.java | 197 -----------
 .../org.apache.phoenix.metrics.MetricsManager   |   1 -
 ...org.apache.phoenix.trace.PhoenixSpanReceiver |   1 -
 ...g.apache.phoenix.trace.TestableMetricsWriter |   1 -
 .../metrics2/impl/ExposedMetricCounterLong.java |  35 --
 .../metrics2/impl/ExposedMetricsRecordImpl.java |  43 ---
 .../metrics2/lib/ExposedMetricsInfoImpl.java    |  32 --
 .../phoenix/trace/PhoenixMetricsWriterTest.java | 142 --------
 .../phoenix/trace/TraceMetricsSourceTest.java   |  96 -----
 .../org/apache/phoenix/trace/TracingTest.java   |  34 --
 phoenix-protocol/src/main/MetaDataService.proto |   8 +-
 phoenix-protocol/src/main/PTable.proto          |   1 +
 pom.xml                                         |  27 --
 114 files changed, 2278 insertions(+), 2796 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/e1377116/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index d915948,aba35fe..5131bbc
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@@ -71,7 -70,9 +71,8 @@@ import org.apache.phoenix.index.IndexMa
  import org.apache.phoenix.index.PhoenixIndexCodec;
  import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
  import org.apache.phoenix.join.HashJoinInfo;
 -import org.apache.phoenix.join.TupleProjector;
  import org.apache.phoenix.query.QueryConstants;
+ import org.apache.phoenix.query.QueryServices;
  import org.apache.phoenix.query.QueryServicesOptions;
  import org.apache.phoenix.schema.ConstraintViolationException;
  import org.apache.phoenix.schema.PColumn;


[5/5] git commit: PHOENIX-1399 Support cross joins and comma-separated table list

Posted by ma...@apache.org.
PHOENIX-1399 Support cross joins and comma-separated table list


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/91e29c60
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/91e29c60
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/91e29c60

Branch: refs/heads/4.0
Commit: 91e29c60d7433352de9a350b7cfde1c67ef952ac
Parents: e137711
Author: maryannxue <ma...@apache.org>
Authored: Sat Nov 1 14:59:52 2014 -0400
Committer: maryannxue <ma...@apache.org>
Committed: Sat Nov 1 14:59:52 2014 -0400

----------------------------------------------------------------------
 .../org/apache/phoenix/end2end/HashJoinIT.java  |  70 +++++-
 phoenix-core/src/main/antlr3/PhoenixSQL.g       |  30 +--
 .../apache/phoenix/compile/DeleteCompiler.java  |   2 +-
 .../apache/phoenix/compile/FromCompiler.java    |  10 +-
 .../apache/phoenix/compile/JoinCompiler.java    | 231 ++++++++++---------
 .../apache/phoenix/compile/QueryCompiler.java   |   4 +-
 .../phoenix/compile/StatementNormalizer.java    |  12 +-
 .../phoenix/compile/SubqueryRewriter.java       |   6 +-
 .../phoenix/compile/SubselectRewriter.java      |   6 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   2 +-
 .../phoenix/exception/SQLExceptionCode.java     |   1 +
 .../apache/phoenix/jdbc/PhoenixStatement.java   |   4 +-
 .../apache/phoenix/optimize/QueryOptimizer.java |   4 +-
 .../org/apache/phoenix/parse/JoinPartNode.java  |  53 -----
 .../apache/phoenix/parse/ParseNodeFactory.java  |  21 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java |  25 +-
 .../apache/phoenix/parse/SelectStatement.java   |  18 +-
 17 files changed, 239 insertions(+), 260 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
index 3850ac9..5190c18 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/HashJoinIT.java
@@ -41,7 +41,6 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
 import java.sql.Timestamp;
 import java.text.SimpleDateFormat;
 import java.util.Collection;
@@ -1810,13 +1809,22 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
     
     @Test
     public void testStarJoin() throws Exception {
-        String[] query = new String[2];
+        String[] query = new String[5];
         query[0] = "SELECT \"order_id\", c.name, i.name iname, quantity, o.date FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o JOIN " 
             + JOIN_CUSTOMER_TABLE_FULL_NAME + " c ON o.\"customer_id\" = c.\"customer_id\" JOIN " 
             + JOIN_ITEM_TABLE_FULL_NAME + " i ON o.\"item_id\" = i.\"item_id\" ORDER BY \"order_id\"";
-        query[1] = "SELECT /*+ NO_STAR_JOIN*/ \"order_id\", c.name, i.name iname, quantity, o.date FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o JOIN " 
+        query[1] = "SELECT \"order_id\", c.name, i.name iname, quantity, o.date FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o, " 
+                + JOIN_CUSTOMER_TABLE_FULL_NAME + " c, " 
+                + JOIN_ITEM_TABLE_FULL_NAME + " i WHERE o.\"item_id\" = i.\"item_id\" AND o.\"customer_id\" = c.\"customer_id\" ORDER BY \"order_id\"";
+        query[2] = "SELECT /*+ NO_STAR_JOIN*/ \"order_id\", c.name, i.name iname, quantity, o.date FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o JOIN " 
                 + JOIN_CUSTOMER_TABLE_FULL_NAME + " c ON o.\"customer_id\" = c.\"customer_id\" JOIN " 
                 + JOIN_ITEM_TABLE_FULL_NAME + " i ON o.\"item_id\" = i.\"item_id\" ORDER BY \"order_id\"";
+        query[3] = "SELECT /*+ NO_STAR_JOIN*/  \"order_id\", c.name, i.name iname, quantity, o.date FROM (" + JOIN_ORDER_TABLE_FULL_NAME + " o, " 
+                + JOIN_CUSTOMER_TABLE_FULL_NAME + " c), " 
+                + JOIN_ITEM_TABLE_FULL_NAME + " i WHERE o.\"item_id\" = i.\"item_id\" AND o.\"customer_id\" = c.\"customer_id\" ORDER BY \"order_id\"";
+        query[4] = "SELECT \"order_id\", c.name, i.name iname, quantity, o.date FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o, (" 
+                + JOIN_CUSTOMER_TABLE_FULL_NAME + " c, " 
+                + JOIN_ITEM_TABLE_FULL_NAME + " i) WHERE o.\"item_id\" = i.\"item_id\" AND o.\"customer_id\" = c.\"customer_id\" ORDER BY \"order_id\"";
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);
         try {
@@ -1860,8 +1868,10 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
 
                 assertFalse(rs.next());
                 
-                rs = conn.createStatement().executeQuery("EXPLAIN " + query[i]);
-                assertEquals(plans[11 + i], QueryUtil.getExplainPlan(rs));
+                if (i < 4) {
+                    rs = conn.createStatement().executeQuery("EXPLAIN " + query[i]);
+                    assertEquals(plans[11 + (i/2)], QueryUtil.getExplainPlan(rs));
+                }
             }
         } finally {
             conn.close();
@@ -3828,17 +3838,56 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
     }
 
     @Test
-    public void testUnsupportedJoinConditions() throws Exception {
-        String query = "SELECT * FROM " + JOIN_ITEM_TABLE_FULL_NAME + " item JOIN " + JOIN_SUPPLIER_TABLE_FULL_NAME + " supp ON (item.\"supplier_id\" || supp.\"supplier_id\") = ''";
+    public void testNonEquiJoin() throws Exception {
         Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
         Connection conn = DriverManager.getConnection(getUrl(), props);
         try {
+            String query = "SELECT item.name, supp.name FROM " + JOIN_ITEM_TABLE_FULL_NAME + " item, " + JOIN_SUPPLIER_TABLE_FULL_NAME + " supp WHERE item.\"supplier_id\" > supp.\"supplier_id\"";
             PreparedStatement statement = conn.prepareStatement(query);
+            ResultSet rs = statement.executeQuery();
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T3");
+            assertEquals(rs.getString(2), "S1");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T4");
+            assertEquals(rs.getString(2), "S1");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T5");
+            assertEquals(rs.getString(2), "S1");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T5");
+            assertEquals(rs.getString(2), "S2");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T5");
+            assertEquals(rs.getString(2), "S3");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T5");
+            assertEquals(rs.getString(2), "S4");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T6");
+            assertEquals(rs.getString(2), "S1");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T6");
+            assertEquals(rs.getString(2), "S2");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T6");
+            assertEquals(rs.getString(2), "S3");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T6");
+            assertEquals(rs.getString(2), "S4");
+            assertTrue(rs.next());
+            assertEquals(rs.getString(1), "T6");
+            assertEquals(rs.getString(2), "S5");
+
+            assertFalse(rs.next());
+            
+            query = "SELECT item.name, supp.name FROM " + JOIN_ITEM_TABLE_FULL_NAME + " item JOIN " + JOIN_SUPPLIER_TABLE_FULL_NAME + " supp ON item.\"supplier_id\" > supp.\"supplier_id\"";
+            statement = conn.prepareStatement(query);
             try {
                 statement.executeQuery();
-                fail("Should have got SQLFeatureNotSupportedException.");
-            } catch (SQLFeatureNotSupportedException e) {
-                assertEquals("Does not support non-standard or non-equi join conditions.", e.getMessage());
+                fail("Should have got SQLException.");
+            } catch (SQLException e) {
+                assertEquals(SQLExceptionCode.AMBIGUOUS_JOIN_CONDITION.getErrorCode(), e.getErrorCode());
             }
         } finally {
             conn.close();
@@ -3847,3 +3896,4 @@ public class HashJoinIT extends BaseHBaseManagedTimeIT {
 
 }
 
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/antlr3/PhoenixSQL.g
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index e2636fb..aa7fdc6 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -647,31 +647,25 @@ parseOrderByField returns [OrderByNode ret]
         { $ret = factory.orderBy(expr, nullsLast, isAscending); }
     ;
 
-parseFrom returns [List<TableNode> ret]
-@init{ret = new ArrayList<TableNode>(4); }
-    :   t=table_ref {$ret.add(t);} (COMMA s=table_ref { $ret.add(s); })*
+parseFrom returns [TableNode ret]
+    :   t=table_list {$ret = t;}
+    ;
+    
+table_list returns [TableNode ret]
+    :   t=table_ref {$ret = t;} (COMMA s=table_ref { $ret = factory.join(JoinTableNode.JoinType.Inner, ret, s, null, false); })*
     ;
 
 table_ref returns [TableNode ret]
-    : t=single_table_ref p=join_parts { $ret = factory.table(t, p); }
-    ;
+	:	l=table_factor { $ret = l; } (j=join_type JOIN r=table_factor ON e=expression { $ret = factory.join(j, ret, r, e, false); })*
+	;
 
-single_table_ref returns [TableNode ret]
-    :   n=bind_name ((AS)? alias=identifier)? { $ret = factory.bindTable(alias, factory.table(null,n)); } // TODO: review
-    |   t=from_table_name ((AS)? alias=identifier)? (LPAREN cdefs=dyn_column_defs RPAREN)? { $ret = factory.namedTable(alias,t,cdefs); }
+table_factor returns [TableNode ret]
+    :   LPAREN t=table_list RPAREN { $ret = t; }
+    |   n=bind_name ((AS)? alias=identifier)? { $ret = factory.bindTable(alias, factory.table(null,n)); } // TODO: review
+    |   f=from_table_name ((AS)? alias=identifier)? (LPAREN cdefs=dyn_column_defs RPAREN)? { $ret = factory.namedTable(alias,f,cdefs); }
     |   LPAREN SELECT s=hinted_select_node RPAREN ((AS)? alias=identifier)? { $ret = factory.derivedTable(alias, s); }
     ;
 
-join_parts returns [List<JoinPartNode> ret]
-@init{ret = new ArrayList<JoinPartNode>(4); }
-	:	(p=join_part { $ret.add(p); })*
-	;
-
-join_part returns [JoinPartNode ret]
-	:	j=join_type JOIN r=single_table_ref ON e=expression { $ret = factory.joinPart(j, e, r); }
-	|	j=join_type JOIN LPAREN r=table_ref RPAREN ON e=expression { $ret = factory.joinPart(j, e, r); }
-	;
-
 join_type returns [JoinTableNode.JoinType ret]
     :   INNER?   { $ret = JoinTableNode.JoinType.Inner; }
     |   LEFT OUTER?   { $ret = JoinTableNode.JoinType.Left; }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
index 1331a2a..6638819 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/DeleteCompiler.java
@@ -315,7 +315,7 @@ public class DeleteCompiler {
                     aliasedNodes.add(FACTORY.aliasedNode(null, FACTORY.column(null, '"' + column.getName().getString() + '"', null)));
                 }
                 select = FACTORY.select(
-                        Collections.singletonList(delete.getTable()), 
+                        delete.getTable(), 
                         hint, false, aliasedNodes, delete.getWhere(), 
                         Collections.<ParseNode>emptyList(), null, 
                         delete.getOrderBy(), delete.getLimit(),

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index 1627f45..b9246c9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
@@ -155,14 +155,12 @@ public class FromCompiler {
      */
     public static ColumnResolver getResolverForQuery(SelectStatement statement, PhoenixConnection connection)
     		throws SQLException {
-    	List<TableNode> fromNodes = statement.getFrom();
-        if (!statement.isJoin() && fromNodes.get(0) instanceof NamedTableNode)
-            return new SingleTableColumnResolver(connection, (NamedTableNode) fromNodes.get(0), true, 1);
+    	TableNode fromNode = statement.getFrom();
+        if (fromNode instanceof NamedTableNode)
+            return new SingleTableColumnResolver(connection, (NamedTableNode) fromNode, true, 1);
         
         MultiTableColumnResolver visitor = new MultiTableColumnResolver(connection, 1);
-        for (TableNode node : fromNodes) {
-            node.accept(visitor);
-        }
+        fromNode.accept(visitor);
         return visitor;
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
index 140146c..b519dc4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
@@ -28,6 +28,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.ListIterator;
 import java.util.Map;
 import java.util.Set;
 
@@ -39,6 +40,7 @@ import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.AndExpression;
 import org.apache.phoenix.expression.CoerceExpression;
 import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.function.CountAggregateFunction;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.parse.AliasedNode;
@@ -109,15 +111,9 @@ public class JoinCompiler {
     }
     
     public static JoinTable compile(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver) throws SQLException {
-        JoinCompiler compiler = new JoinCompiler(statement, select, resolver);
-        
-        List<TableNode> from = select.getFrom();
-        if (from.size() > 1) {
-            throw new SQLFeatureNotSupportedException("Cross join not supported.");
-        }
-        
+        JoinCompiler compiler = new JoinCompiler(statement, select, resolver);        
         JoinTableConstructor constructor = compiler.new JoinTableConstructor();
-        Pair<Table, List<JoinSpec>> res = from.get(0).accept(constructor);
+        Pair<Table, List<JoinSpec>> res = select.getFrom().accept(constructor);
         JoinTable joinTable = res.getSecond() == null ? compiler.new JoinTable(res.getFirst()) : compiler.new JoinTable(res.getFirst(), res.getSecond());
         if (select.getWhere() != null) {
             joinTable.addFilter(select.getWhere());
@@ -218,8 +214,8 @@ public class JoinCompiler {
         private final List<Table> tables;
         private final List<TableRef> tableRefs;
         private final boolean allLeftJoin;
-        private final boolean hasRightJoin;
-        private final List<JoinTable> prefilterAcceptedTables;
+        private final boolean isPrefilterAccepted;
+        private final List<JoinSpec> prefilterAcceptedTables;
         
         private JoinTable(Table table) {
             this.table = table;
@@ -228,8 +224,8 @@ public class JoinCompiler {
             this.tables = Collections.<Table>singletonList(table);
             this.tableRefs = Collections.<TableRef>singletonList(table.getTableRef());
             this.allLeftJoin = false;
-            this.hasRightJoin = false;
-            this.prefilterAcceptedTables = Collections.<JoinTable>emptyList();
+            this.isPrefilterAccepted = true;
+            this.prefilterAcceptedTables = Collections.<JoinSpec>emptyList();
         }
         
         private JoinTable(Table table, List<JoinSpec> joinSpecs) {
@@ -241,10 +237,12 @@ public class JoinCompiler {
             this.tables.add(table);
             boolean allLeftJoin = true;
             int lastRightJoinIndex = -1;
+            boolean hasFullJoin = false;
             for (int i = 0; i < joinSpecs.size(); i++) {
                 JoinSpec joinSpec = joinSpecs.get(i);
                 this.tables.addAll(joinSpec.getJoinTable().getTables());
                 allLeftJoin = allLeftJoin && joinSpec.getType() == JoinType.Left;
+                hasFullJoin = hasFullJoin || joinSpec.getType() == JoinType.Full;
                 if (joinSpec.getType() == JoinType.Right) {
                     lastRightJoinIndex = i;
                 }
@@ -253,12 +251,12 @@ public class JoinCompiler {
                 this.tableRefs.add(t.getTableRef());
             }
             this.allLeftJoin = allLeftJoin;
-            this.hasRightJoin = lastRightJoinIndex > -1;
-            this.prefilterAcceptedTables = new ArrayList<JoinTable>();
+            this.isPrefilterAccepted = !hasFullJoin && lastRightJoinIndex == -1;
+            this.prefilterAcceptedTables = new ArrayList<JoinSpec>();
             for (int i = lastRightJoinIndex == -1 ? 0 : lastRightJoinIndex; i < joinSpecs.size(); i++) {
                 JoinSpec joinSpec = joinSpecs.get(i);
-                if (joinSpec.getType() != JoinType.Left && joinSpec.getType() != JoinType.Anti) {
-                    prefilterAcceptedTables.add(joinSpec.getJoinTable());
+                if (joinSpec.getType() != JoinType.Left && joinSpec.getType() != JoinType.Anti && joinSpec.getType() != JoinType.Full) {
+                    prefilterAcceptedTables.add(joinSpec);
                 }
             }
         }
@@ -303,7 +301,7 @@ public class JoinCompiler {
             
             WhereNodeVisitor visitor = new WhereNodeVisitor(origResolver, table,
                     postFilters, Collections.<TableRef>singletonList(table.getTableRef()), 
-                    hasRightJoin, prefilterAcceptedTables);
+                    isPrefilterAccepted, prefilterAcceptedTables);
             filter.accept(visitor);
         }
         
@@ -415,11 +413,11 @@ public class JoinCompiler {
            if (!postFilters.isEmpty())
                return true;
            
-           if (!hasRightJoin && table.hasFilters())
+           if (isPrefilterAccepted && table.hasFilters())
                return true;
            
-           for (JoinTable joinTable : prefilterAcceptedTables) {
-               if (joinTable.hasFilters())
+           for (JoinSpec joinSpec : prefilterAcceptedTables) {
+               if (joinSpec.getJoinTable().hasFilters())
                    return true;
            }
            
@@ -433,6 +431,7 @@ public class JoinCompiler {
         private final JoinTable joinTable;
         private final boolean singleValueOnly;
         private Set<TableRef> dependencies;
+        private OnNodeVisitor onNodeVisitor;
         
         private JoinSpec(JoinType type, ParseNode onNode, JoinTable joinTable, 
                 boolean singleValueOnly, ColumnResolver resolver) throws SQLException {
@@ -441,13 +440,16 @@ public class JoinCompiler {
             this.joinTable = joinTable;
             this.singleValueOnly = singleValueOnly;
             this.dependencies = new HashSet<TableRef>();
-            OnNodeVisitor visitor = new OnNodeVisitor(resolver, onConditions, dependencies, joinTable);
-            onNode.accept(visitor);
-            if (onConditions.isEmpty()) {
-                visitor.throwUnsupportedJoinConditionException();
+            this.onNodeVisitor = new OnNodeVisitor(resolver, onConditions, dependencies, joinTable);
+            if (onNode != null) {
+                onNode.accept(this.onNodeVisitor);
             }
         }
         
+        public void addOnCondition(ParseNode node) throws SQLException {
+            node.accept(onNodeVisitor);
+        }
+        
         public JoinType getType() {
             return type;
         }
@@ -469,6 +471,12 @@ public class JoinCompiler {
         }
         
         public Pair<List<Expression>, List<Expression>> compileJoinConditions(StatementContext context, ColumnResolver leftResolver, ColumnResolver rightResolver) throws SQLException {
+            if (onConditions.isEmpty()) {
+                return new Pair<List<Expression>, List<Expression>>(
+                        Collections.<Expression> singletonList(LiteralExpression.newConstant(1)), 
+                        Collections.<Expression> singletonList(LiteralExpression.newConstant(1)));
+            }
+            
             ColumnResolver resolver = context.getResolver();
             List<Pair<Expression, Expression>> compiled = new ArrayList<Pair<Expression, Expression>>(onConditions.size());
             context.setResolver(leftResolver);
@@ -679,8 +687,7 @@ public class JoinCompiler {
             if (isSubselect())
                 return SubselectRewriter.applyPostFilters(subselect, preFilters, tableNode.getAlias());
             
-            List<TableNode> from = Collections.<TableNode>singletonList(tableNode);
-            return NODE_FACTORY.select(from, select.getHint(), false, selectNodes, getPreFiltersCombined(), null, null, null, null, 0, false, select.hasSequence());
+            return NODE_FACTORY.select(tableNode, select.getHint(), false, selectNodes, getPreFiltersCombined(), null, null, null, null, 0, false, select.hasSequence());
         }
         
         public boolean hasFilters() {
@@ -804,17 +811,17 @@ public class JoinCompiler {
         private Table table;
         private List<ParseNode> postFilters;
         private List<TableRef> selfTableRefs;
-        private boolean hasRightJoin;
-        private List<JoinTable> prefilterAcceptedTables;
+        private boolean isPrefilterAccepted;
+        private List<JoinSpec> prefilterAcceptedTables;
         ColumnRefParseNodeVisitor columnRefVisitor;
         
         public WhereNodeVisitor(ColumnResolver resolver, Table table,
-                List<ParseNode> postFilters, List<TableRef> selfTableRefs, boolean hasRightJoin, 
-                List<JoinTable> prefilterAcceptedTables) {
+                List<ParseNode> postFilters, List<TableRef> selfTableRefs, boolean isPrefilterAccepted, 
+                List<JoinSpec> prefilterAcceptedTables) {
             this.table = table;
             this.postFilters = postFilters;
             this.selfTableRefs = selfTableRefs;
-            this.hasRightJoin = hasRightJoin;
+            this.isPrefilterAccepted = isPrefilterAccepted;
             this.prefilterAcceptedTables = prefilterAcceptedTables;
             this.columnRefVisitor = new ColumnRefParseNodeVisitor(resolver);
         }
@@ -833,7 +840,7 @@ public class JoinCompiler {
             switch (type) {
             case NONE:
             case SELF_ONLY:
-                if (!hasRightJoin) {
+                if (isPrefilterAccepted) {
                     table.addFilter(node);
                 } else {
                     postFilters.add(node);
@@ -841,9 +848,9 @@ public class JoinCompiler {
                 break;
             case FOREIGN_ONLY:
                 JoinTable matched = null;
-                for (JoinTable joinTable : prefilterAcceptedTables) {
-                    if (columnRefVisitor.getContentType(joinTable.getTableRefs()) == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
-                        matched = joinTable;
+                for (JoinSpec joinSpec : prefilterAcceptedTables) {
+                    if (columnRefVisitor.getContentType(joinSpec.getJoinTable().getTableRefs()) == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
+                        matched = joinSpec.getJoinTable();
                         break;
                     }
                 }
@@ -879,6 +886,29 @@ public class JoinCompiler {
         public Void visitLeave(AndParseNode node, List<Void> l) throws SQLException {
             return null;
         }
+        
+        @Override
+        public Void visitLeave(ComparisonParseNode node, List<Void> l) 
+                throws SQLException {
+            if (!(node instanceof EqualParseNode))
+                return leaveBooleanNode(node, l);
+            
+            ListIterator<JoinSpec> iter = prefilterAcceptedTables.listIterator(prefilterAcceptedTables.size());
+            while (iter.hasPrevious()) {
+                JoinSpec joinSpec = iter.previous();
+                if (joinSpec.getType() != JoinType.Inner || joinSpec.isSingleValueOnly()) {
+                    continue;
+                }
+                
+                try {
+                    joinSpec.addOnCondition(node);
+                    return null;
+                } catch (SQLException e) {
+                }
+            }
+            
+            return leaveBooleanNode(node, l);
+        }
     }
     
     private static class OnNodeVisitor extends BooleanParseNodeVisitor<Void> {
@@ -894,49 +924,47 @@ public class JoinCompiler {
             this.joinTable = joinTable;
             this.columnRefVisitor = new ColumnRefParseNodeVisitor(resolver);
         }
-        
         @Override
         protected boolean enterBooleanNode(ParseNode node) throws SQLException {
             return false;
         }
-        
+
         @Override
         protected Void leaveBooleanNode(ParseNode node,
                 List<Void> l) throws SQLException {
             columnRefVisitor.reset();
             node.accept(columnRefVisitor);
             ColumnRefParseNodeVisitor.ColumnRefType type = columnRefVisitor.getContentType(joinTable.getTableRefs());
-            if (type == ColumnRefParseNodeVisitor.ColumnRefType.NONE 
+            if (type == ColumnRefParseNodeVisitor.ColumnRefType.NONE
                     || type == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
                 joinTable.addFilter(node);
             } else {
-                throwUnsupportedJoinConditionException();
+                throwAmbiguousJoinConditionException();
             }
             return null;
         }
-        
+
         @Override
         protected boolean enterNonBooleanNode(ParseNode node) throws SQLException {
             return false;
         }
-        
+
         @Override
         protected Void leaveNonBooleanNode(ParseNode node, List<Void> l) throws SQLException {
             return null;
         }
-        
+
         @Override
         public boolean visitEnter(AndParseNode node) throws SQLException {
             return true;
         }
-
         @Override
         public Void visitLeave(AndParseNode node, List<Void> l) throws SQLException {
             return null;
         }
-        
+
         @Override
-        public Void visitLeave(ComparisonParseNode node, List<Void> l) 
+        public Void visitLeave(ComparisonParseNode node, List<Void> l)
                 throws SQLException {
             if (!(node instanceof EqualParseNode))
                 return leaveBooleanNode(node, l);
@@ -951,16 +979,16 @@ public class JoinCompiler {
             if ((lhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY || lhsType == ColumnRefParseNodeVisitor.ColumnRefType.NONE)
                     && (rhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY || rhsType == ColumnRefParseNodeVisitor.ColumnRefType.NONE)) {
                 joinTable.addFilter(node);
-            } else if (lhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY 
+            } else if (lhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY
                     && rhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
                 onConditions.add(node);
                 dependencies.addAll(lhsTableRefSet);
-            } else if (rhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY 
+            } else if (rhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY
                     && lhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
                 onConditions.add(NODE_FACTORY.equal(node.getRHS(), node.getLHS()));
                 dependencies.addAll(rhsTableRefSet);
             } else {
-                throwUnsupportedJoinConditionException();
+                throwAmbiguousJoinConditionException();
             }
             return null;
         }
@@ -972,10 +1000,9 @@ public class JoinCompiler {
          * 2) a boolean condition referencing to the self table only.
          * Otherwise, it can be ambiguous.
          */
-        public void throwUnsupportedJoinConditionException() 
-                throws SQLFeatureNotSupportedException {
-            throw new SQLFeatureNotSupportedException("Does not support non-standard or non-equi join conditions.");
-        }           
+        public void throwAmbiguousJoinConditionException() throws SQLException {
+            throw new SQLExceptionInfo.Builder(SQLExceptionCode.AMBIGUOUS_JOIN_CONDITION).build().buildException();
+        }
     }
 
     private static class ColumnRefParseNodeVisitor extends StatelessTraverseAllParseNodeVisitor {
@@ -1050,7 +1077,7 @@ public class JoinCompiler {
         return !select.isJoin() 
                 && !select.isAggregate() 
                 && !select.isDistinct() 
-                && !(select.getFrom().get(0) instanceof DerivedTableNode)
+                && !(select.getFrom() instanceof DerivedTableNode)
                 && select.getLimit() == null;
     }
     
@@ -1168,64 +1195,61 @@ public class JoinCompiler {
         if (replacement.isEmpty()) 
             return select;
         
-        List<TableNode> from = select.getFrom();
-        List<TableNode> newFrom = Lists.newArrayListWithExpectedSize(from.size());
-        for (TableNode node : from) {
-            newFrom.add(node.accept(new TableNodeVisitor<TableNode>() {
-                private TableRef resolveTable(String alias, TableName name) throws SQLException {
-                    if (alias != null)
-                        return resolver.resolveTable(null, alias);
+        TableNode from = select.getFrom();
+        TableNode newFrom = from.accept(new TableNodeVisitor<TableNode>() {
+            private TableRef resolveTable(String alias, TableName name) throws SQLException {
+                if (alias != null)
+                    return resolver.resolveTable(null, alias);
 
-                    return resolver.resolveTable(name.getSchemaName(), name.getTableName());
-                }
+                return resolver.resolveTable(name.getSchemaName(), name.getTableName());
+            }
 
-                private TableName getReplacedTableName(TableRef tableRef) {
-                    String schemaName = tableRef.getTable().getSchemaName().getString();
-                    return TableName.create(schemaName.length() == 0 ? null : schemaName, tableRef.getTable().getTableName().getString());
-                }
+            private TableName getReplacedTableName(TableRef tableRef) {
+                String schemaName = tableRef.getTable().getSchemaName().getString();
+                return TableName.create(schemaName.length() == 0 ? null : schemaName, tableRef.getTable().getTableName().getString());
+            }
 
-                @Override
-                public TableNode visit(BindTableNode boundTableNode) throws SQLException {
-                    TableRef tableRef = resolveTable(boundTableNode.getAlias(), boundTableNode.getName());
-                    TableRef replaceRef = replacement.get(tableRef);
-                    if (replaceRef == null)
-                        return boundTableNode;
+            @Override
+            public TableNode visit(BindTableNode boundTableNode) throws SQLException {
+                TableRef tableRef = resolveTable(boundTableNode.getAlias(), boundTableNode.getName());
+                TableRef replaceRef = replacement.get(tableRef);
+                if (replaceRef == null)
+                    return boundTableNode;
 
-                    String alias = boundTableNode.getAlias();
-                    return NODE_FACTORY.bindTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef));
-                }
+                String alias = boundTableNode.getAlias();
+                return NODE_FACTORY.bindTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef));
+            }
 
-                @Override
-                public TableNode visit(JoinTableNode joinNode) throws SQLException {
-                    TableNode lhs = joinNode.getLHS();
-                    TableNode rhs = joinNode.getRHS();
-                    TableNode lhsReplace = lhs.accept(this);
-                    TableNode rhsReplace = rhs.accept(this);
-                    if (lhs == lhsReplace && rhs == rhsReplace)
-                        return joinNode;
+            @Override
+            public TableNode visit(JoinTableNode joinNode) throws SQLException {
+                TableNode lhs = joinNode.getLHS();
+                TableNode rhs = joinNode.getRHS();
+                TableNode lhsReplace = lhs.accept(this);
+                TableNode rhsReplace = rhs.accept(this);
+                if (lhs == lhsReplace && rhs == rhsReplace)
+                    return joinNode;
 
-                    return NODE_FACTORY.join(joinNode.getType(), lhsReplace, rhsReplace, joinNode.getOnNode(), joinNode.isSingleValueOnly());
-                }
+                return NODE_FACTORY.join(joinNode.getType(), lhsReplace, rhsReplace, joinNode.getOnNode(), joinNode.isSingleValueOnly());
+            }
 
-                @Override
-                public TableNode visit(NamedTableNode namedTableNode)
-                        throws SQLException {
-                    TableRef tableRef = resolveTable(namedTableNode.getAlias(), namedTableNode.getName());
-                    TableRef replaceRef = replacement.get(tableRef);
-                    if (replaceRef == null)
-                        return namedTableNode;
+            @Override
+            public TableNode visit(NamedTableNode namedTableNode)
+                    throws SQLException {
+                TableRef tableRef = resolveTable(namedTableNode.getAlias(), namedTableNode.getName());
+                TableRef replaceRef = replacement.get(tableRef);
+                if (replaceRef == null)
+                    return namedTableNode;
 
-                    String alias = namedTableNode.getAlias();
-                    return NODE_FACTORY.namedTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef), namedTableNode.getDynamicColumns());
-                }
+                String alias = namedTableNode.getAlias();
+                return NODE_FACTORY.namedTable(alias == null ? null : '"' + alias + '"', getReplacedTableName(replaceRef), namedTableNode.getDynamicColumns());
+            }
 
-                @Override
-                public TableNode visit(DerivedTableNode subselectNode)
-                        throws SQLException {
-                    return subselectNode;
-                }
-            }));
-        }
+            @Override
+            public TableNode visit(DerivedTableNode subselectNode)
+                    throws SQLException {
+                return subselectNode;
+            }
+        });
         
         return IndexStatementRewriter.translate(NODE_FACTORY.select(select, newFrom), resolver, replacement);        
     }
@@ -1249,9 +1273,8 @@ public class JoinCompiler {
             }
         }
         String tableAlias = tableRef.getTableAlias();
-        List<? extends TableNode> from = Collections.singletonList(NODE_FACTORY.namedTable(tableAlias == null ? null : '"' + tableAlias + '"', tName, dynamicCols));
+        TableNode from = NODE_FACTORY.namedTable(tableAlias == null ? null : '"' + tableAlias + '"', tName, dynamicCols);
 
-        // TODO: review, as it seems like we're potentially losing if the select statement is an aggregate (i.e. if it's an ungrouped aggregate for example)
         return NODE_FACTORY.select(from, hintNode, false, selectList, where, groupBy, null, orderBy, null, 0, groupBy != null, hasSequence);
     }
     

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index 214330c..b9ca813 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -247,7 +247,7 @@ public class QueryCompiler {
         JoinSpec lastJoinSpec = joinSpecs.get(joinSpecs.size() - 1);
         JoinType type = lastJoinSpec.getType();
         if (type == JoinType.Full)
-            throw new SQLFeatureNotSupportedException("Full joins not supported.");
+            throw new SQLFeatureNotSupportedException(type + " joins not supported.");
         
         if (type == JoinType.Right || type == JoinType.Inner) {
             if (!lastJoinSpec.getJoinTable().getJoinSpecs().isEmpty())
@@ -294,7 +294,7 @@ public class QueryCompiler {
             if (rhs.getLimit() != null && !rhs.isAggregate() && !rhs.isDistinct() && rhs.getOrderBy().isEmpty()) {
                 limit = LimitCompiler.compile(context, rhs);
             }
-            HashJoinInfo joinInfo = new HashJoinInfo(projectedTable.getTable(), joinIds, new List[] {joinExpressions}, new JoinType[] {type == JoinType.Inner ? type : JoinType.Left}, new boolean[] {true}, new PTable[] {lhsProjTable.getTable()}, new int[] {fieldPosition}, postJoinFilterExpression, limit, forceProjection);
+            HashJoinInfo joinInfo = new HashJoinInfo(projectedTable.getTable(), joinIds, new List[] {joinExpressions}, new JoinType[] {type == JoinType.Right ? JoinType.Left : type}, new boolean[] {true}, new PTable[] {lhsProjTable.getTable()}, new int[] {fieldPosition}, postJoinFilterExpression, limit, forceProjection);
             Pair<Expression, Expression> keyRangeExpressions = new Pair<Expression, Expression>(null, null);
             getKeyExpressionCombinations(keyRangeExpressions, context, rhsTableRef, type, joinExpressions, hashExpressions);
             return HashJoinPlan.create(joinTable.getStatement(), rhsPlan, joinInfo, new HashSubPlan[] {new HashSubPlan(0, lhsPlan, hashExpressions, false, keyRangeExpressions.getFirst(), keyRangeExpressions.getSecond(), lhsJoin.hasFilters())});

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
index 803f554..f6a6f7a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
@@ -37,7 +37,6 @@ import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.ParseNodeRewriter;
 import org.apache.phoenix.parse.SelectStatement;
 import org.apache.phoenix.parse.TableName;
-import org.apache.phoenix.parse.TableNode;
 import org.apache.phoenix.parse.TableNodeVisitor;
 import org.apache.phoenix.parse.TableWildcardParseNode;
 import org.apache.phoenix.parse.WildcardParseNode;
@@ -88,13 +87,10 @@ public class StatementNormalizer extends ParseNodeRewriter {
                     if (selectNodes == normSelectNodes) {
                         normSelectNodes = Lists.newArrayList(selectNodes.subList(0, i));
                     }
-                    for (TableNode tNode : statement.getFrom()) {
-                        TableNameVisitor visitor = new TableNameVisitor();
-                        List<TableName> tableNames = tNode.accept(visitor);
-                        for (TableName tableName : tableNames) {
-                            TableWildcardParseNode node = NODE_FACTORY.tableWildcard(tableName);
-                            normSelectNodes.add(NODE_FACTORY.aliasedNode(null, node));
-                        }
+                    List<TableName> tableNames = statement.getFrom().accept(new TableNameVisitor());
+                    for (TableName tableName : tableNames) {
+                        TableWildcardParseNode node = NODE_FACTORY.tableWildcard(tableName);
+                        normSelectNodes.add(NODE_FACTORY.aliasedNode(null, node));
                     }
                 } else if (selectNodes != normSelectNodes) {
                     normSelectNodes.add(aliasedNode);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
index 3e470ce..01aca00 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
@@ -83,13 +83,13 @@ public class SubqueryRewriter extends ParseNodeRewriter {
         if (normWhere == where)
             return select;
         
-        return NODE_FACTORY.select(select, Collections.singletonList(rewriter.tableNode), normWhere);
+        return NODE_FACTORY.select(select, rewriter.tableNode, normWhere);
     }
     
     protected SubqueryRewriter(SelectStatement select, ColumnResolver resolver, PhoenixConnection connection) {
         this.resolver = resolver;
         this.connection = connection;
-        this.tableNode = select.getFrom().get(0);
+        this.tableNode = select.getFrom();
         this.topNode = null;
     }
     
@@ -339,7 +339,7 @@ public class SubqueryRewriter extends ParseNodeRewriter {
                 groupbyNodes.set(i - 1, aliasedNode.getNode());
             }
             SelectStatement derivedTableStmt = NODE_FACTORY.select(subquery, subquery.isDistinct(), derivedTableSelect, where, derivedTableGroupBy, true);
-            subquery = NODE_FACTORY.select(Collections.singletonList(NODE_FACTORY.derivedTable(derivedTableAlias, derivedTableStmt)), subquery.getHint(), false, selectNodes, null, groupbyNodes, null, Collections.<OrderByNode> emptyList(), null, subquery.getBindCount(), true, false);
+            subquery = NODE_FACTORY.select(NODE_FACTORY.derivedTable(derivedTableAlias, derivedTableStmt), subquery.getHint(), false, selectNodes, null, groupbyNodes, null, Collections.<OrderByNode> emptyList(), null, subquery.getBindCount(), true, false);
         }
         
         ParseNode onNode = conditionExtractor.getJoinCondition();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
index 35ea900..d229478 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
@@ -59,9 +59,9 @@ public class SubselectRewriter extends ParseNodeRewriter {
     }
     
     public static SelectStatement flatten(SelectStatement select, PhoenixConnection connection) throws SQLException {
-        List<TableNode> from = select.getFrom();
-        while (from.size() == 1 && from.get(0) instanceof DerivedTableNode) {
-            DerivedTableNode derivedTable = (DerivedTableNode) from.get(0);
+        TableNode from = select.getFrom();
+        while (from != null && from instanceof DerivedTableNode) {
+            DerivedTableNode derivedTable = (DerivedTableNode) from;
             SelectStatement subselect = derivedTable.getSelect();
             ColumnResolver resolver = FromCompiler.getResolverForQuery(subselect, connection);
             SubselectRewriter rewriter = new SubselectRewriter(resolver, subselect.getSelect(), derivedTable.getAlias());

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 44f62da..0be40b8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -381,7 +381,7 @@ public class UpsertCompiler {
                         selectResolver = FromCompiler.getResolverForQuery(transformedSelect, connection);
                         select = StatementNormalizer.normalize(transformedSelect, selectResolver);
                     }
-                    sameTable = select.getFrom().size() == 1
+                    sameTable = !select.isJoin()
                         && tableRefToBe.equals(selectResolver.getTables().get(0));
                     tableRefToBe = adjustTimestampToMinOfSameTable(tableRefToBe, selectResolver.getTables());
                     /* We can run the upsert in a coprocessor if:

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index bf13eec..5c6018d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -80,6 +80,7 @@ public enum SQLExceptionCode {
     VALUE_IN_LIST_NOT_CONSTANT(214, "22008", "Values in IN must evaluate to a constant."),
     SINGLE_ROW_SUBQUERY_RETURNS_MULTIPLE_ROWS(215, "22015", "Single-row sub-query returns more than one row."),
     SUBQUERY_RETURNS_DIFFERENT_NUMBER_OF_FIELDS(216, "22016", "Sub-query must return the same number of fields as the left-hand-side expression of 'IN'."),
+    AMBIGUOUS_JOIN_CONDITION(217, "22017", "Ambiguous or non-equi join condition specified. Consider using table list with where clause."),
     
     /**
      * Constraint Violation (errorcode 03, sqlstate 23)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
index f6b7736..c369be8 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
@@ -292,7 +292,7 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
     }
     
     private static class ExecutableSelectStatement extends SelectStatement implements CompilableStatement {
-        private ExecutableSelectStatement(List<? extends TableNode> from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
+        private ExecutableSelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
                 List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
             super(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence);
         }
@@ -796,7 +796,7 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
 
     protected static class ExecutableNodeFactory extends ParseNodeFactory {
         @Override
-        public ExecutableSelectStatement select(List<? extends TableNode> from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
+        public ExecutableSelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
                                                 ParseNode where, List<ParseNode> groupBy, ParseNode having,
                                                 List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
             return new ExecutableSelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy,

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
index f027ab3..8f6d026 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/optimize/QueryOptimizer.java
@@ -214,9 +214,9 @@ public class QueryOptimizer {
         schemaName = schemaName.length() == 0 ? null :  '"' + schemaName + '"';
 
         String tableName = '"' + index.getTableName().getString() + '"';
-        List<? extends TableNode> tables = Collections.singletonList(FACTORY.namedTable(alias, FACTORY.table(schemaName, tableName)));
+        TableNode table = FACTORY.namedTable(alias, FACTORY.table(schemaName, tableName));
         try {
-            SelectStatement indexSelect = FACTORY.select(select, tables);
+            SelectStatement indexSelect = FACTORY.select(select, table);
             ColumnResolver resolver = FromCompiler.getResolverForQuery(indexSelect, statement.getConnection());
             // Check index state of now potentially updated index table to make sure it's active
             if (PIndexState.ACTIVE.equals(resolver.getTables().get(0).getTable().getIndexState())) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/parse/JoinPartNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/JoinPartNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/JoinPartNode.java
deleted file mode 100644
index cdbaaea..0000000
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/JoinPartNode.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.parse;
-
-import org.apache.phoenix.parse.JoinTableNode.JoinType;
-
-/**
- * 
- * Node representing the partial join clause in the FROM clause of SQL
- *
- * 
- * @since 0.1
- */
-class JoinPartNode {
-    
-    private final JoinType type;
-    private final ParseNode onNode;
-    private final TableNode table;
-    
-    JoinPartNode(JoinType type, ParseNode onNode, TableNode table) {
-        this.type = type;
-        this.onNode = onNode;
-        this.table = table;
-    }
-
-    JoinType getType() {
-        return type;
-    }
-    
-    ParseNode getOnNode() {
-        return onNode;
-    }
-    
-    TableNode getTable() {
-        return table;
-    }
-}
-

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
index 6f8339e..cc0b455 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
@@ -415,17 +415,6 @@ public class ParseNodeFactory {
         return new IsNullParseNode(child, negate);
     }
 
-    public TableNode table(TableNode table, List<JoinPartNode> parts) {
-        for (JoinPartNode part : parts) {
-            table = new JoinTableNode(part.getType(), table, part.getTable(), part.getOnNode(), false);
-        }
-        return table;
-    }
-
-    JoinPartNode joinPart(JoinType type, ParseNode onNode, TableNode table) {
-        return new JoinPartNode(type, onNode, table);
-    }
-
     public JoinTableNode join(JoinType type, TableNode lhs, TableNode rhs, ParseNode on, boolean singleValueOnly) {
         return new JoinTableNode(type, lhs, rhs, on, singleValueOnly);
     }
@@ -586,7 +575,7 @@ public class ParseNodeFactory {
         return new OuterJoinParseNode(node);
     }
 
-    public SelectStatement select(List<? extends TableNode> from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
+    public SelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
             List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
 
         return new SelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy, having,
@@ -606,14 +595,14 @@ public class ParseNodeFactory {
                 statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
     }
 
-    public SelectStatement select(SelectStatement statement, List<? extends TableNode> tables) {
-        return select(tables, statement.getHint(), statement.isDistinct(), statement.getSelect(), statement.getWhere(), statement.getGroupBy(),
+    public SelectStatement select(SelectStatement statement, TableNode table) {
+        return select(table, statement.getHint(), statement.isDistinct(), statement.getSelect(), statement.getWhere(), statement.getGroupBy(),
                 statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(),
                 statement.hasSequence());
     }
 
-    public SelectStatement select(SelectStatement statement, List<? extends TableNode> tables, ParseNode where) {
-        return select(tables, statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(),
+    public SelectStatement select(SelectStatement statement, TableNode table, ParseNode where) {
+        return select(table, statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(),
                 statement.getHaving(), statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(),
                 statement.hasSequence());
     }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
index 338a45b..809480f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
@@ -57,24 +57,8 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
      */
     public static SelectStatement rewrite(SelectStatement statement, ParseNodeRewriter rewriter) throws SQLException {
         Map<String,ParseNode> aliasMap = rewriter.getAliasMap();
-        List<TableNode> from = statement.getFrom();
-        List<TableNode> normFrom = from;
-        TableNodeRewriter tableNodeRewriter = new TableNodeRewriter(rewriter);
-        for (int i = 0; i < from.size(); i++) {
-            TableNode tableNode = from.get(i);
-            tableNodeRewriter.reset();
-            TableNode normTableNode = tableNode.accept(tableNodeRewriter);
-            if (normTableNode == tableNode) {
-                if (from != normFrom) {
-                    normFrom.add(tableNode);
-                }
-                continue;
-            }
-            if (from == normFrom) {
-                normFrom = Lists.newArrayList(from.subList(0, i));        		    
-            }
-            normFrom.add(normTableNode);
-        }
+        TableNode from = statement.getFrom();
+        TableNode normFrom = from.accept(new TableNodeRewriter(rewriter));
         ParseNode where = statement.getWhere();
         ParseNode normWhere = where;
         if (where != null) {
@@ -541,9 +525,6 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
 	        this.parseNodeRewriter = parseNodeRewriter;
 	    }
 
-	    public void reset() {
-	    }
-
         @Override
         public TableNode visit(BindTableNode boundTableNode) throws SQLException {
             return boundTableNode;
@@ -557,7 +538,7 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
             TableNode normLhsNode = lhsNode.accept(this);
             TableNode normRhsNode = rhsNode.accept(this);
             parseNodeRewriter.reset();
-            ParseNode normOnNode = onNode.accept(parseNodeRewriter);
+            ParseNode normOnNode = onNode == null ? null : onNode.accept(parseNodeRewriter);
             if (lhsNode == normLhsNode && rhsNode == normRhsNode && onNode == normOnNode)
                 return joinNode;
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/91e29c60/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
index e7302dc..961846b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
@@ -36,14 +36,14 @@ import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunctionInfo;
 public class SelectStatement implements FilterableStatement {
     public static final SelectStatement SELECT_ONE =
             new SelectStatement(
-                    Collections.<TableNode>emptyList(), null, false, 
+                    null, null, false, 
                     Collections.<AliasedNode>singletonList(new AliasedNode(null, LiteralParseNode.ONE)),
                     null, Collections.<ParseNode>emptyList(),
                     null, Collections.<OrderByNode>emptyList(),
                     null, 0, false, false);
     public static final SelectStatement COUNT_ONE =
             new SelectStatement(
-                    Collections.<TableNode>emptyList(), null, false,
+                    null, null, false,
                     Collections.<AliasedNode>singletonList(
                     new AliasedNode(null, 
                         new AggregateFunctionParseNode(
@@ -80,7 +80,7 @@ public class SelectStatement implements FilterableStatement {
                 select.getOrderBy(), select.getLimit(), select.getBindCount(), select.isAggregate(), select.hasSequence());
     }
     
-    private final List<TableNode> fromTable;
+    private final TableNode fromTable;
     private final HintNode hint;
     private final boolean isDistinct;
     private final List<AliasedNode> select;
@@ -104,10 +104,10 @@ public class SelectStatement implements FilterableStatement {
         return count;
     }
     
-    protected SelectStatement(List<? extends TableNode> from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
+    protected SelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
             ParseNode where, List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit,
             int bindCount, boolean isAggregate, boolean hasSequence) {
-        this.fromTable = Collections.unmodifiableList(from);
+        this.fromTable = from;
         this.hint = hint == null ? HintNode.EMPTY_HINT_NODE : hint;
         this.isDistinct = isDistinct;
         this.select = Collections.unmodifiableList(select);
@@ -136,7 +136,7 @@ public class SelectStatement implements FilterableStatement {
         return bindCount;
     }
     
-    public List<TableNode> getFrom() {
+    public TableNode getFrom() {
         return fromTable;
     }
     
@@ -190,13 +190,13 @@ public class SelectStatement implements FilterableStatement {
     }
 
     public boolean isJoin() {
-        return fromTable.size() > 1 || (fromTable.size() > 0 && fromTable.get(0) instanceof JoinTableNode);
+        return fromTable != null && fromTable instanceof JoinTableNode;
     }
     
     public SelectStatement getInnerSelectStatement() {
-        if (fromTable.size() != 1 || !(fromTable.get(0) instanceof DerivedTableNode))
+        if (fromTable == null || !(fromTable instanceof DerivedTableNode))
             return null;
         
-        return ((DerivedTableNode) fromTable.get(0)).getSelect();
+        return ((DerivedTableNode) fromTable).getSelect();
     }
 }


[2/5] git commit: PHOENIX-944 Support derived tables in FROM clause that needs extra steps of client-side aggregation or other processing

Posted by ma...@apache.org.
PHOENIX-944 Support derived tables in FROM clause that needs extra steps of client-side aggregation or other processing


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f004e135
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f004e135
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f004e135

Branch: refs/heads/4.0
Commit: f004e135c8ae72c6a22ed9879b37dd6dcf86627a
Parents: e8a0355
Author: maryannxue <ma...@apache.org>
Authored: Wed Oct 22 13:48:35 2014 -0400
Committer: maryannxue <ma...@apache.org>
Committed: Thu Oct 23 23:25:18 2014 -0400

----------------------------------------------------------------------
 .../apache/phoenix/end2end/DerivedTableIT.java  | 282 +++++++++++++------
 .../org/apache/phoenix/end2end/SubqueryIT.java  |  12 +
 .../apache/phoenix/compile/FromCompiler.java    |  27 +-
 .../apache/phoenix/compile/GroupByCompiler.java |   5 +-
 .../apache/phoenix/compile/JoinCompiler.java    |   2 +-
 .../apache/phoenix/compile/OrderByCompiler.java |   2 +-
 .../apache/phoenix/compile/QueryCompiler.java   |  58 +++-
 .../phoenix/compile/SubqueryRewriter.java       |  10 +-
 .../TrackOrderPreservingExpressionCompiler.java |  27 +-
 .../apache/phoenix/compile/WhereCompiler.java   |  33 +--
 .../GroupedAggregateRegionObserver.java         |   2 +-
 .../coprocessor/HashJoinRegionScanner.java      |   4 +-
 .../phoenix/coprocessor/ScanRegionObserver.java |   3 +-
 .../UngroupedAggregateRegionObserver.java       |   2 +-
 .../phoenix/execute/ClientAggregatePlan.java    | 229 +++++++++++++++
 .../phoenix/execute/ClientProcessingPlan.java   |  82 ++++++
 .../apache/phoenix/execute/ClientScanPlan.java  |  92 ++++++
 .../apache/phoenix/execute/HashJoinPlan.java    |  24 +-
 .../phoenix/execute/TupleProjectionPlan.java    |  49 +---
 .../apache/phoenix/execute/TupleProjector.java  | 276 ++++++++++++++++++
 .../expression/ProjectedColumnExpression.java   |   2 +-
 .../DistinctValueClientAggregator.java          |   7 +-
 .../BaseGroupedAggregatingResultIterator.java   | 105 +++++++
 .../GroupedAggregatingResultIterator.java       |  67 +----
 .../iterate/LookAheadResultIterator.java        |   4 +
 .../org/apache/phoenix/join/TupleProjector.java | 260 -----------------
 .../apache/phoenix/optimize/QueryOptimizer.java |   1 +
 .../apache/phoenix/parse/ParseNodeFactory.java  |   4 +-
 .../apache/phoenix/parse/SelectStatement.java   |   7 +
 .../org/apache/phoenix/schema/ColumnRef.java    |   2 +-
 .../java/org/apache/phoenix/util/IndexUtil.java |   2 +-
 31 files changed, 1183 insertions(+), 499 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/it/java/org/apache/phoenix/end2end/DerivedTableIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DerivedTableIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DerivedTableIT.java
index 8a80764..8ef542a 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/DerivedTableIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/DerivedTableIT.java
@@ -35,19 +35,19 @@ import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
+import java.sql.Array;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
-import java.sql.SQLFeatureNotSupportedException;
 import java.util.Collection;
 import java.util.List;
 import java.util.Properties;
 
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -61,33 +61,65 @@ import com.google.common.collect.Lists;
 @RunWith(Parameterized.class)
 public class DerivedTableIT extends BaseClientManagedTimeIT {
     private static final String tenantId = getOrganizationId();
-    private static final String MSG = "Complex nested queries not supported.";
     
     private long ts;
-    private String indexDDL;
+    private String[] indexDDL;
+    private String[] plans;
     
-    public DerivedTableIT(String indexDDL) {
+    public DerivedTableIT(String[] indexDDL, String[] plans) {
         this.indexDDL = indexDDL;
+        this.plans = plans;
     }
     
     @Before
     public void initTable() throws Exception {
          ts = nextTimestamp();
         initATableValues(tenantId, getDefaultSplits(tenantId), null, ts);
-        if (indexDDL != null && indexDDL.length() > 0) {
+        if (indexDDL != null && indexDDL.length > 0) {
             Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
             props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts));
             Connection conn = DriverManager.getConnection(getUrl(), props);
-            conn.createStatement().execute(indexDDL);
+            for (String ddl : indexDDL) {
+                conn.createStatement().execute(ddl);
+            }
         }
     }
     
     @Parameters(name="{0}")
     public static Collection<Object> data() {
         List<Object> testCases = Lists.newArrayList();
-        testCases.add(new String[] { "CREATE INDEX ATABLE_DERIVED_IDX ON aTable (a_byte) INCLUDE ("
-                + "    A_STRING, " + "    B_STRING)" });
-        testCases.add(new String[] { "" });
+        testCases.add(new String[][] {
+                { 
+                "CREATE INDEX ATABLE_DERIVED_IDX ON aTable (a_byte) INCLUDE (A_STRING, B_STRING)" 
+                }, {
+                "CLIENT PARALLEL 1-WAY FULL SCAN OVER ATABLE_DERIVED_IDX\n" +
+                "    SERVER AGGREGATE INTO DISTINCT ROWS BY [A_STRING, B_STRING]\n" +
+                "CLIENT MERGE SORT\n" +
+                "CLIENT SORTED BY [B_STRING]\n" +
+                "CLIENT SORTED BY [A]\n" +
+                "CLIENT AGGREGATE INTO DISTINCT ROWS BY [A]\n" +
+                "CLIENT SORTED BY [A DESC]",
+                
+                "CLIENT PARALLEL 1-WAY FULL SCAN OVER ATABLE_DERIVED_IDX\n" +
+                "    SERVER AGGREGATE INTO DISTINCT ROWS BY [A_STRING, B_STRING]\n" +
+                "CLIENT MERGE SORT\n" +
+                "CLIENT AGGREGATE INTO DISTINCT ROWS BY [A]\n" +
+                "CLIENT DISTINCT ON [COLLECTDISTINCT(B)]"}});
+        testCases.add(new String[][] {
+                {}, {
+                "CLIENT PARALLEL 4-WAY FULL SCAN OVER ATABLE\n" +
+                "    SERVER AGGREGATE INTO DISTINCT ROWS BY [A_STRING, B_STRING]\n" +
+                "CLIENT MERGE SORT\n" +
+                "CLIENT SORTED BY [B_STRING]\n" +
+                "CLIENT SORTED BY [A]\n" +
+                "CLIENT AGGREGATE INTO DISTINCT ROWS BY [A]\n" +
+                "CLIENT SORTED BY [A DESC]",
+                
+                "CLIENT PARALLEL 4-WAY FULL SCAN OVER ATABLE\n" +
+                "    SERVER AGGREGATE INTO DISTINCT ROWS BY [A_STRING, B_STRING]\n" +
+                "CLIENT MERGE SORT\n" +
+                "CLIENT AGGREGATE INTO DISTINCT ROWS BY [A]\n" +
+                "CLIENT DISTINCT ON [COLLECTDISTINCT(B)]"}});
         return testCases;
     }
 
@@ -183,21 +215,21 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             
             // (limit) where
             query = "SELECT t.eid FROM (SELECT entity_id eid, b_string b FROM aTable LIMIT 2) AS t WHERE t.b = '" + C_VALUE + "'";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(ROW2,rs.getString(1));
+
+            assertFalse(rs.next());
 
             // (count) where
             query = "SELECT t.c FROM (SELECT count(*) c FROM aTable) AS t WHERE t.c > 0";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(9,rs.getInt(1));
+
+            assertFalse(rs.next());
         } finally {
             conn.close();
         }
@@ -227,12 +259,78 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             
             // (groupby) groupby
             query = "SELECT t.c, count(*) FROM (SELECT count(*) c FROM aTable GROUP BY a_string) AS t GROUP BY t.c";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertEquals(1,rs.getInt(2));
+            assertTrue (rs.next());
+            assertEquals(4,rs.getInt(1));
+            assertEquals(2,rs.getInt(2));
+
+            assertFalse(rs.next());
+            
+            // (groupby) groupby orderby
+            query = "SELECT t.c, count(*) FROM (SELECT count(*) c FROM aTable GROUP BY a_string) AS t GROUP BY t.c ORDER BY count(*) DESC";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(4,rs.getInt(1));
+            assertEquals(2,rs.getInt(2));
+            assertTrue (rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertEquals(1,rs.getInt(2));
+
+            assertFalse(rs.next());
+            
+            // (groupby a, b orderby b) groupby a orderby a
+            query = "SELECT t.a, COLLECTDISTINCT(t.b) FROM (SELECT b_string b, a_string a FROM aTable GROUP BY a_string, b_string ORDER BY b_string) AS t GROUP BY t.a ORDER BY t.a DESC";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(C_VALUE,rs.getString(1));
+            String[] b = new String[1];
+            b[0] = E_VALUE;
+            Array array = conn.createArrayOf("VARCHAR", b);
+            assertEquals(array,rs.getArray(2));
+            assertTrue (rs.next());
+            assertEquals(B_VALUE,rs.getString(1));
+            b = new String[3];
+            b[0] = B_VALUE;
+            b[1] = C_VALUE;
+            b[2] = E_VALUE;
+            array = conn.createArrayOf("VARCHAR", b);
+            assertEquals(array,rs.getArray(2));
+            assertTrue (rs.next());
+            assertEquals(A_VALUE,rs.getString(1));
+            assertEquals(array,rs.getArray(2));
+
+            assertFalse(rs.next());
+            
+            rs = conn.createStatement().executeQuery("EXPLAIN " + query);
+            assertEquals(plans[0], QueryUtil.getExplainPlan(rs));
+            
+            // distinct b (groupby b, a) groupby a
+            query = "SELECT DISTINCT COLLECTDISTINCT(t.b) FROM (SELECT b_string b, a_string a FROM aTable GROUP BY b_string, a_string) AS t GROUP BY t.a";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            b = new String[1];
+            b[0] = E_VALUE;
+            array = conn.createArrayOf("VARCHAR", b);
+            assertEquals(array,rs.getArray(1));
+            assertTrue (rs.next());
+            b = new String[3];
+            b[0] = B_VALUE;
+            b[1] = C_VALUE;
+            b[2] = E_VALUE;
+            array = conn.createArrayOf("VARCHAR", b);
+            assertEquals(array,rs.getArray(1));
+
+            assertFalse(rs.next());
+            
+            rs = conn.createStatement().executeQuery("EXPLAIN " + query);
+            assertEquals(plans[1], QueryUtil.getExplainPlan(rs));
         } finally {
             conn.close();
         }
@@ -321,13 +419,15 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             assertFalse(rs.next());
             
             // (limit) orderby
-            query = "SELECT t.eid FROM (SELECT entity_id eid, b_string b FROM aTable LIMIT 2) AS t ORDER BY t.b, t.eid";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            query = "SELECT t.eid FROM (SELECT entity_id eid, b_string b FROM aTable LIMIT 2) AS t ORDER BY t.b DESC, t.eid";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(ROW2,rs.getString(1));
+            assertTrue (rs.next());
+            assertEquals(ROW1,rs.getString(1));
+
+            assertFalse(rs.next());
         } finally {
             conn.close();
         }
@@ -386,15 +486,16 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             
             // limit ? limit ?            
             query = "SELECT t.eid FROM (SELECT entity_id eid FROM aTable LIMIT ?) AS t LIMIT ?";
-            try {
-                statement = conn.prepareStatement(query);
-                statement.setInt(1, 4);
-                statement.setInt(2, 2);
-                statement.executeQuery();
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            statement.setInt(1, 4);
+            statement.setInt(2, 2);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(ROW1,rs.getString(1));
+            assertTrue (rs.next());
+            assertEquals(ROW2,rs.getString(1));
+
+            assertFalse(rs.next());
             
             // (groupby orderby) limit
             query = "SELECT a, s FROM (SELECT a_string a, sum(a_byte) s FROM aTable GROUP BY a_string ORDER BY sum(a_byte)) LIMIT 2";
@@ -466,30 +567,51 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             
             // distinct (distinct)
             query = "SELECT DISTINCT t.a FROM (SELECT DISTINCT a_string a, b_string b FROM aTable) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(A_VALUE,rs.getString(1));
+            assertTrue (rs.next());
+            assertEquals(B_VALUE,rs.getString(1));
+            assertTrue (rs.next());
+            assertEquals(C_VALUE,rs.getString(1));
+
+            assertFalse(rs.next());
             
             // distinct (groupby)
             query = "SELECT distinct t.c FROM (SELECT count(*) c FROM aTable GROUP BY a_string) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertTrue (rs.next());
+            assertEquals(4,rs.getInt(1));
+
+            assertFalse(rs.next());
+            
+            // distinct (groupby) orderby
+            query = "SELECT distinct t.c FROM (SELECT count(*) c FROM aTable GROUP BY a_string) AS t ORDER BY t.c DESC";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(4,rs.getInt(1));
+            assertTrue (rs.next());
+            assertEquals(1,rs.getInt(1));
+
+            assertFalse(rs.next());
             
             // distinct (limit)
             query = "SELECT DISTINCT t.a, t.b FROM (SELECT a_string a, b_string b FROM aTable LIMIT 2) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(A_VALUE,rs.getString(1));
+            assertEquals(B_VALUE,rs.getString(2));
+            assertTrue (rs.next());
+            assertEquals(A_VALUE,rs.getString(1));
+            assertEquals(C_VALUE,rs.getString(2));
+
+            assertFalse(rs.next());
         } finally {
             conn.close();
         }
@@ -522,30 +644,30 @@ public class DerivedTableIT extends BaseClientManagedTimeIT {
             
             // count (distinct)
             query = "SELECT count(*) FROM (SELECT DISTINCT a_string FROM aTable) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(3,rs.getInt(1));
+
+            assertFalse(rs.next());
             
             // count (groupby)
             query = "SELECT count(*) FROM (SELECT count(*) c FROM aTable GROUP BY a_string) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(3,rs.getInt(1));
+
+            assertFalse(rs.next());
             
             // count (limit)
             query = "SELECT count(*) FROM (SELECT entity_id FROM aTable LIMIT 2) AS t";
-            try {
-                conn.createStatement().executeQuery(query);
-                fail("Should have got SQLFeatureNotSupportedException");
-            } catch (SQLFeatureNotSupportedException e) {                
-                assertEquals(MSG, e.getMessage());
-            }
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(2,rs.getInt(1));
+
+            assertFalse(rs.next());
         } finally {
             conn.close();
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
index e4b4c8b..4f3ca16 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SubqueryIT.java
@@ -978,6 +978,18 @@ public class SubqueryIT extends BaseHBaseManagedTimeIT {
             assertEquals(rs.getString(2), "T6");
 
             assertFalse(rs.next());
+            
+            query = "SELECT \"order_id\", name FROM " + JOIN_ORDER_TABLE_FULL_NAME + " o JOIN " + JOIN_ITEM_TABLE_FULL_NAME + " i ON o.\"item_id\" = i.\"item_id\" WHERE quantity != ANY(SELECT quantity FROM " + JOIN_ORDER_TABLE_FULL_NAME + " q WHERE o.\"item_id\" = q.\"item_id\" GROUP BY quantity)";
+            statement = conn.prepareStatement(query);
+            rs = statement.executeQuery();
+            assertTrue (rs.next());
+            assertEquals(rs.getString(1), "000000000000002");
+            assertEquals(rs.getString(2), "T6");
+            assertTrue (rs.next());
+            assertEquals(rs.getString(1), "000000000000004");
+            assertEquals(rs.getString(2), "T6");
+
+            assertFalse(rs.next());
         } finally {
             conn.close();
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index 0fed42a..1627f45 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult;
+import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.parse.AliasedNode;
 import org.apache.phoenix.parse.BindTableNode;
@@ -175,6 +176,23 @@ public class FromCompiler {
         SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, statement.getTable(), true);
         return visitor;
     }
+    
+    public static ColumnResolver getResolverForCompiledDerivedTable(PhoenixConnection connection, TableRef tableRef, RowProjector projector) 
+            throws SQLException {
+        List<PColumn> projectedColumns = new ArrayList<PColumn>();
+        List<Expression> sourceExpressions = new ArrayList<Expression>();
+        PTable table = tableRef.getTable();
+        for (PColumn column : table.getColumns()) {
+            Expression sourceExpression = projector.getColumnProjector(column.getPosition()).getExpression();
+            PColumnImpl projectedColumn = new PColumnImpl(column.getName(), column.getFamilyName(), 
+                    sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(), 
+                    column.getPosition(), sourceExpression.getSortOrder(), column.getArraySize(), column.getViewConstant(), column.isViewReferenced());                
+            projectedColumns.add(projectedColumn);
+            sourceExpressions.add(sourceExpression);
+        }
+        PTable t = PTableImpl.makePTable(table, projectedColumns);
+        return new SingleTableColumnResolver(connection, new TableRef(tableRef.getTableAlias(), t, tableRef.getLowerBoundTimeStamp(), tableRef.hasDynamicCols()));
+    }
 
     public static ColumnResolver getResolverForMutation(DMLStatement statement, PhoenixConnection connection)
             throws SQLException {
@@ -215,6 +233,12 @@ public class FromCompiler {
             TableRef tableRef = createTableRef(tableNode, updateCacheImmediately);
             tableRefs = ImmutableList.of(tableRef);
         }
+        
+        public SingleTableColumnResolver(PhoenixConnection connection, TableRef tableRef) {
+            super(connection, 0);
+            alias = tableRef.getTableAlias();
+            tableRefs = ImmutableList.of(tableRef);
+        }
 
         @Override
 		public List<TableRef> getTables() {
@@ -366,8 +390,7 @@ public class FromCompiler {
         }
     }
     
-    // TODO: unused, but should be used for joins - make private once used
-    public static class MultiTableColumnResolver extends BaseColumnResolver implements TableNodeVisitor<Void> {
+    private static class MultiTableColumnResolver extends BaseColumnResolver implements TableNodeVisitor<Void> {
         private final ListMultimap<String, TableRef> tableMap;
         private final List<TableRef> tables;
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
index dda27aa..a561a47 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
@@ -30,6 +30,7 @@ import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
 import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.CoerceExpression;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.parse.AliasedNode;
@@ -135,7 +136,7 @@ public class GroupByCompiler {
      * @throws ColumnNotFoundException if column name could not be resolved
      * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
      */
-    public static GroupBy compile(StatementContext context, SelectStatement statement) throws SQLException {
+    public static GroupBy compile(StatementContext context, SelectStatement statement, TupleProjector tupleProjector) throws SQLException {
         List<ParseNode> groupByNodes = statement.getGroupBy();
         /**
          * Distinct can use an aggregate plan if there's no group by.
@@ -160,7 +161,7 @@ public class GroupByCompiler {
         TrackOrderPreservingExpressionCompiler groupByVisitor =
                 new TrackOrderPreservingExpressionCompiler(context, 
                         GroupBy.EMPTY_GROUP_BY, groupByNodes.size(), 
-                        Ordering.UNORDERED);
+                        Ordering.UNORDERED, tupleProjector);
         for (ParseNode node : groupByNodes) {
             Expression expression = node.accept(groupByVisitor);
             if (groupByVisitor.isAggregate()) {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
index ef053de..140146c 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
@@ -35,12 +35,12 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.AndExpression;
 import org.apache.phoenix.expression.CoerceExpression;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.function.CountAggregateFunction;
 import org.apache.phoenix.jdbc.PhoenixStatement;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.parse.AliasedNode;
 import org.apache.phoenix.parse.AndParseNode;
 import org.apache.phoenix.parse.BindTableNode;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
index 0fd07ec..2629846 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
@@ -85,7 +85,7 @@ public class OrderByCompiler {
         // accumulate columns in ORDER BY
         TrackOrderPreservingExpressionCompiler visitor = 
                 new TrackOrderPreservingExpressionCompiler(context, groupBy, 
-                        orderByNodes.size(), Ordering.ORDERED);
+                        orderByNodes.size(), Ordering.ORDERED, null);
         LinkedHashSet<OrderByExpression> orderByExpressions = Sets.newLinkedHashSetWithExpectedSize(orderByNodes.size());
         for (OrderByNode node : orderByNodes) {
             boolean isAscending = node.isAscending();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index d82ac02..0eafcdb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -34,11 +34,14 @@ import org.apache.phoenix.compile.JoinCompiler.ProjectedPTableWrapper;
 import org.apache.phoenix.compile.JoinCompiler.Table;
 import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
 import org.apache.phoenix.execute.AggregatePlan;
+import org.apache.phoenix.execute.ClientAggregatePlan;
+import org.apache.phoenix.execute.ClientScanPlan;
 import org.apache.phoenix.execute.HashJoinPlan;
 import org.apache.phoenix.execute.HashJoinPlan.HashSubPlan;
 import org.apache.phoenix.execute.HashJoinPlan.WhereClauseSubPlan;
 import org.apache.phoenix.execute.ScanPlan;
 import org.apache.phoenix.execute.TupleProjectionPlan;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.RowValueConstructorExpression;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
@@ -47,7 +50,6 @@ import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.join.HashJoinInfo;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.parse.HintNode.Hint;
 import org.apache.phoenix.parse.JoinTableNode.JoinType;
 import org.apache.phoenix.parse.ParseNode;
@@ -59,11 +61,11 @@ import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PTable;
-import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 
 
@@ -349,30 +351,49 @@ public class QueryCompiler {
     }
     
     protected QueryPlan compileSingleQuery(StatementContext context, SelectStatement select, List<Object> binds, boolean asSubquery, boolean allowPageFilter) throws SQLException{
+        SelectStatement innerSelect = select.getInnerSelectStatement();
+        if (innerSelect == null) {
+            return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, null, null);
+        }
+        
+        QueryPlan innerPlan = compileSubquery(innerSelect);
+        TupleProjector tupleProjector = new TupleProjector(innerPlan.getProjector());
+        innerPlan = new TupleProjectionPlan(innerPlan, tupleProjector, null);
+        
+        // Replace the original resolver and table with those having compiled type info.
+        TableRef tableRef = context.getResolver().getTables().get(0);        
+        ColumnResolver resolver = FromCompiler.getResolverForCompiledDerivedTable(statement.getConnection(), tableRef, innerPlan.getProjector());
+        context.setResolver(resolver);
+        tableRef = resolver.getTables().get(0);
+        context.setCurrentTable(tableRef);
+        
+        return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, innerPlan, innerPlan.getOrderBy().getOrderByExpressions().isEmpty() ? tupleProjector : null);
+    }
+    
+    protected QueryPlan compileSingleFlatQuery(StatementContext context, SelectStatement select, List<Object> binds, boolean asSubquery, boolean allowPageFilter, QueryPlan innerPlan, TupleProjector innerPlanTupleProjector) throws SQLException{
         PhoenixConnection connection = statement.getConnection();
         ColumnResolver resolver = context.getResolver();
         TableRef tableRef = context.getCurrentTable();
         PTable table = tableRef.getTable();
         
-        // TODO PHOENIX-944. See DerivedTableIT for a list of unsupported cases.
-        if (table.getType() == PTableType.SUBQUERY)
-            throw new SQLFeatureNotSupportedException("Complex nested queries not supported.");
-        
         ParseNode viewWhere = null;
         if (table.getViewStatement() != null) {
             viewWhere = new SQLParser(table.getViewStatement()).parseQuery().getWhere();
         }
         Integer limit = LimitCompiler.compile(context, select);
 
-        GroupBy groupBy = GroupByCompiler.compile(context, select);
+        GroupBy groupBy = GroupByCompiler.compile(context, select, innerPlanTupleProjector);
         // Optimize the HAVING clause by finding any group by expressions that can be moved
         // to the WHERE clause
         select = HavingCompiler.rewrite(context, select, groupBy);
         Expression having = HavingCompiler.compile(context, select, groupBy);
         // Don't pass groupBy when building where clause expression, because we do not want to wrap these
         // expressions as group by key expressions since they're pre, not post filtered.
-        context.setResolver(FromCompiler.getResolverForQuery(select, connection));
-        Set<SubqueryParseNode> subqueries = WhereCompiler.compile(context, select, viewWhere);
+        if (innerPlan == null) {
+            context.setResolver(FromCompiler.getResolverForQuery(select, connection));
+        }
+        Set<SubqueryParseNode> subqueries = Sets.<SubqueryParseNode> newHashSet();
+        Expression where = WhereCompiler.compile(context, select, viewWhere, subqueries);
         context.setResolver(resolver); // recover resolver
         OrderBy orderBy = OrderByCompiler.compile(context, select, groupBy, limit); 
         RowProjector projector = ProjectionCompiler.compile(context, select, groupBy, asSubquery ? Collections.<PDatum>emptyList() : targetColumns);
@@ -386,10 +407,14 @@ public class QueryCompiler {
                 limit = maxRows;
             }
         }
-        ParallelIteratorFactory parallelIteratorFactory = asSubquery ? null : this.parallelIteratorFactory;
-        QueryPlan plan = select.isAggregate() || select.isDistinct() ? 
-                  new AggregatePlan(context, select, tableRef, projector, limit, orderBy, parallelIteratorFactory, groupBy, having)
-                : new ScanPlan(context, select, tableRef, projector, limit, orderBy, parallelIteratorFactory, allowPageFilter);
+        
+        QueryPlan plan = innerPlan;
+        if (plan == null) {
+            ParallelIteratorFactory parallelIteratorFactory = asSubquery ? null : this.parallelIteratorFactory;
+            plan = select.isAggregate() || select.isDistinct() ? 
+                      new AggregatePlan(context, select, tableRef, projector, limit, orderBy, parallelIteratorFactory, groupBy, having)
+                    : new ScanPlan(context, select, tableRef, projector, limit, orderBy, parallelIteratorFactory, allowPageFilter);
+        }
         if (!subqueries.isEmpty()) {
             int count = subqueries.size();
             WhereClauseSubPlan[] subPlans = new WhereClauseSubPlan[count];
@@ -401,6 +426,13 @@ public class QueryCompiler {
             plan = HashJoinPlan.create(select, plan, null, subPlans);
         }
         
+        if (innerPlan != null) {
+            plan =  select.isAggregate() || select.isDistinct() ?
+                      new ClientAggregatePlan(context, select, tableRef, projector, limit, where, orderBy, groupBy, having, plan)
+                    : new ClientScanPlan(context, select, tableRef, projector, limit, where, orderBy, plan);
+
+        }
+        
         return plan;
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
index 4b37259..3e470ce 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubqueryRewriter.java
@@ -238,7 +238,7 @@ public class SubqueryRewriter extends ParseNodeRewriter {
                 groupbyNodes.add(aliasedNode.getNode());
             }
             groupbyNodes.addAll(subquery.getGroupBy());
-            subquery = NODE_FACTORY.select(subquery, selectNodes, where, groupbyNodes, true);
+            subquery = NODE_FACTORY.select(subquery, subquery.isDistinct(), selectNodes, where, groupbyNodes, true);
         }
         
         ParseNode onNode = conditionExtractor.getJoinCondition();
@@ -323,11 +323,11 @@ public class SubqueryRewriter extends ParseNodeRewriter {
         }
         
         if (derivedTableAlias == null) {
-            subquery = NODE_FACTORY.select(subquery, selectNodes, where, groupbyNodes, true);
+            subquery = NODE_FACTORY.select(subquery, false, selectNodes, where, groupbyNodes, true);
         } else {
             List<ParseNode> derivedTableGroupBy = Lists.newArrayListWithExpectedSize(subquery.getGroupBy().size() + groupbyNodes.size());
-            derivedTableGroupBy.addAll(subquery.getGroupBy());
             derivedTableGroupBy.addAll(groupbyNodes);
+            derivedTableGroupBy.addAll(subquery.getGroupBy());
             List<AliasedNode> derivedTableSelect = Lists.newArrayListWithExpectedSize(aliasedNodes.size() + selectNodes.size() - 1);
             derivedTableSelect.addAll(aliasedNodes);
             for (int i = 1; i < selectNodes.size(); i++) {
@@ -338,8 +338,8 @@ public class SubqueryRewriter extends ParseNodeRewriter {
                 selectNodes.set(i, aliasedNode);
                 groupbyNodes.set(i - 1, aliasedNode.getNode());
             }
-            SelectStatement derivedTableStmt = NODE_FACTORY.select(subquery, derivedTableSelect, where, derivedTableGroupBy, true);
-            subquery = NODE_FACTORY.select(Collections.singletonList(NODE_FACTORY.derivedTable(derivedTableAlias, derivedTableStmt)), subquery.getHint(), false, selectNodes, null, groupbyNodes, null, Collections.<OrderByNode> emptyList(), null, subquery.getBindCount(), true, subquery.hasSequence());
+            SelectStatement derivedTableStmt = NODE_FACTORY.select(subquery, subquery.isDistinct(), derivedTableSelect, where, derivedTableGroupBy, true);
+            subquery = NODE_FACTORY.select(Collections.singletonList(NODE_FACTORY.derivedTable(derivedTableAlias, derivedTableStmt)), subquery.getHint(), false, selectNodes, null, groupbyNodes, null, Collections.<OrderByNode> emptyList(), null, subquery.getBindCount(), true, false);
         }
         
         ParseNode onNode = conditionExtractor.getJoinCondition();

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/TrackOrderPreservingExpressionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/TrackOrderPreservingExpressionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/TrackOrderPreservingExpressionCompiler.java
index 44f9527..9fd6837 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/TrackOrderPreservingExpressionCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/TrackOrderPreservingExpressionCompiler.java
@@ -24,8 +24,10 @@ import java.util.Comparator;
 import java.util.List;
 
 import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.LiteralExpression;
+import org.apache.phoenix.expression.RowKeyColumnExpression;
 import org.apache.phoenix.expression.function.FunctionExpression;
 import org.apache.phoenix.expression.function.FunctionExpression.OrderPreserving;
 import org.apache.phoenix.parse.CaseParseNode;
@@ -35,10 +37,8 @@ import org.apache.phoenix.parse.MultiplyParseNode;
 import org.apache.phoenix.parse.SubtractParseNode;
 import org.apache.phoenix.schema.ColumnRef;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.SortOrder;
-import org.apache.phoenix.schema.PTable.IndexType;
-import org.apache.phoenix.util.SchemaUtil;
-
 import com.google.common.collect.Lists;
 
 /**
@@ -57,12 +57,13 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
     private final List<Entry> entries;
     private final Ordering ordering;
     private final int positionOffset;
+    private final TupleProjector tupleProjector; // for derived-table query compilation
     private OrderPreserving orderPreserving = OrderPreserving.YES;
     private ColumnRef columnRef;
     private boolean isOrderPreserving = true;
     private Boolean isReverse;
     
-    TrackOrderPreservingExpressionCompiler(StatementContext context, GroupBy groupBy, int expectedEntrySize, Ordering ordering) {
+    TrackOrderPreservingExpressionCompiler(StatementContext context, GroupBy groupBy, int expectedEntrySize, Ordering ordering, TupleProjector tupleProjector) {
         super(context, groupBy);
         PTable table = context.getResolver().getTables().get(0).getTable();
         boolean isSalted = table.getBucketNum() != null;
@@ -72,6 +73,7 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
         positionOffset = (isSalted ? 1 : 0) + (isMultiTenant ? 1 : 0) + (isSharedViewIndex ? 1 : 0);
         entries = Lists.newArrayListWithExpectedSize(expectedEntrySize);
         this.ordering = ordering;
+        this.tupleProjector = tupleProjector;
     }
     
     public Boolean isReverse() {
@@ -159,7 +161,7 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
         ColumnRef ref = super.resolveColumn(node);
         // If we encounter any non PK column, then we can't aggregate on-the-fly
         // because the distinct groups have no correlation to the KV column value
-        if (!SchemaUtil.isPKColumn(ref.getColumn())) {
+        if (getColumnPKPosition(ref) < 0) {
             orderPreserving = OrderPreserving.NO;
         }
         
@@ -173,6 +175,17 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
         }
         return ref;
     }
+    
+    private int getColumnPKPosition(ColumnRef ref) {
+        if (tupleProjector != null && ref.getTable().getType() == PTableType.SUBQUERY) {
+            Expression expression = tupleProjector.getExpressions()[ref.getColumnPosition()];
+            if (expression instanceof RowKeyColumnExpression) {
+                return ((RowKeyColumnExpression) expression).getPosition();
+            }
+        }
+        
+        return ref.getPKSlotPosition();
+    }
 
     public boolean addEntry(Expression expression) {
         if (expression instanceof LiteralExpression) {
@@ -206,7 +219,7 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
         return entries;
     }
 
-    public static class Entry {
+    public class Entry {
         private final Expression expression;
         private final ColumnRef columnRef;
         private final OrderPreserving orderPreserving;
@@ -222,7 +235,7 @@ public class TrackOrderPreservingExpressionCompiler extends ExpressionCompiler {
         }
 
         public int getPkPosition() {
-            return columnRef.getPKSlotPosition();
+            return getColumnPKPosition(columnRef);
         }
 
         public int getColumnPosition() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
index 51d0ffc..2c49fed 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
@@ -41,7 +41,6 @@ import org.apache.phoenix.filter.SingleCFCQKeyValueComparisonFilter;
 import org.apache.phoenix.filter.SingleCQKeyValueComparisonFilter;
 import org.apache.phoenix.parse.ColumnParseNode;
 import org.apache.phoenix.parse.FilterableStatement;
-import org.apache.phoenix.parse.HintNode.Hint;
 import org.apache.phoenix.parse.ParseNode;
 import org.apache.phoenix.parse.ParseNodeFactory;
 import org.apache.phoenix.parse.SelectStatement;
@@ -78,8 +77,8 @@ public class WhereCompiler {
     private WhereCompiler() {
     }
 
-    public static Set<SubqueryParseNode> compile(StatementContext context, FilterableStatement statement) throws SQLException {
-        return compile(context, statement, null);
+    public static Expression compile(StatementContext context, FilterableStatement statement) throws SQLException {
+        return compile(context, statement, null, null);
     }
     
     /**
@@ -92,8 +91,8 @@ public class WhereCompiler {
      * @throws ColumnNotFoundException if column name could not be resolved
      * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
      */
-    public static Set<SubqueryParseNode> compile(StatementContext context, FilterableStatement statement, ParseNode viewWhere) throws SQLException {
-        return compile(context, statement, viewWhere, Collections.<Expression>emptyList(), false);
+    public static Expression compile(StatementContext context, FilterableStatement statement, ParseNode viewWhere, Set<SubqueryParseNode> subqueryNodes) throws SQLException {
+        return compile(context, statement, viewWhere, Collections.<Expression>emptyList(), false, subqueryNodes);
     }
     
     /**
@@ -106,18 +105,20 @@ public class WhereCompiler {
      * @throws ColumnNotFoundException if column name could not be resolved
      * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
      */    
-    public static Set<SubqueryParseNode> compile(StatementContext context, FilterableStatement statement, ParseNode viewWhere, List<Expression> dynamicFilters, boolean hashJoinOptimization) throws SQLException {
+    public static Expression compile(StatementContext context, FilterableStatement statement, ParseNode viewWhere, List<Expression> dynamicFilters, boolean hashJoinOptimization, Set<SubqueryParseNode> subqueryNodes) throws SQLException {
         ParseNode where = statement.getWhere();
-        Set<SubqueryParseNode> subqueryNodes = Sets.<SubqueryParseNode> newHashSet();
-        SubqueryParseNodeVisitor subqueryVisitor = new SubqueryParseNodeVisitor(context, subqueryNodes);
-        if (where != null) {
-            where.accept(subqueryVisitor);
-        }
-        if (viewWhere != null) {
+            viewWhere.accept(subqueryVisitor);
+        if (subqueryNodes != null) { // a null subqueryNodes set means the caller guarantees the WHERE clause contains no sub-queries.
+            SubqueryParseNodeVisitor subqueryVisitor = new SubqueryParseNodeVisitor(context, subqueryNodes);
+            if (where != null) {
+                where.accept(subqueryVisitor);
+            }
+            if (viewWhere != null) {
+                viewWhere.accept(subqueryVisitor);
+            }
+            if (!subqueryNodes.isEmpty()) {
+                return null;
+            }
         }
-        if (!subqueryNodes.isEmpty())
-            return subqueryNodes;
         
         Set<Expression> extractedNodes = Sets.<Expression>newHashSet();
         WhereExpressionCompiler whereCompiler = new WhereExpressionCompiler(context);
@@ -142,7 +143,7 @@ public class WhereCompiler {
         expression = WhereOptimizer.pushKeyExpressionsToScan(context, statement, expression, extractedNodes);
         setScanFilter(context, statement, expression, whereCompiler.disambiguateWithFamily, hashJoinOptimization);
 
-        return subqueryNodes;
+        return expression;
     }
     
     private static class WhereExpressionCompiler extends ExpressionCompiler {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
index 8add152..91a9bdd 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/GroupedAggregateRegionObserver.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.io.WritableUtils;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.TenantCache;
 import org.apache.phoenix.cache.aggcache.SpillableGroupByCache;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.ExpressionType;
 import org.apache.phoenix.expression.aggregator.Aggregator;
@@ -60,7 +61,6 @@ import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.join.HashJoinInfo;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.PDataType;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
index 8e0d42d..724122d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
@@ -34,11 +34,11 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.HashCache;
 import org.apache.phoenix.cache.TenantCache;
+import org.apache.phoenix.execute.TupleProjector;
+import org.apache.phoenix.execute.TupleProjector.ProjectedValueTuple;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
 import org.apache.phoenix.join.HashJoinInfo;
-import org.apache.phoenix.join.TupleProjector;
-import org.apache.phoenix.join.TupleProjector.ProjectedValueTuple;
 import org.apache.phoenix.parse.JoinTableNode.JoinType;
 import org.apache.phoenix.schema.IllegalDataException;
 import org.apache.phoenix.schema.KeyValueSchema;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
index 8c72dd5..1672fd7 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
@@ -26,6 +26,7 @@ import java.util.List;
 import java.util.Set;
 
 import com.google.common.collect.Sets;
+
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -41,6 +42,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.phoenix.cache.GlobalCache;
 import org.apache.phoenix.cache.TenantCache;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.KeyValueColumnExpression;
 import org.apache.phoenix.expression.OrderByExpression;
@@ -51,7 +53,6 @@ import org.apache.phoenix.iterate.OrderedResultIterator;
 import org.apache.phoenix.iterate.RegionScannerResultIterator;
 import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.join.HashJoinInfo;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.memory.MemoryManager.MemoryChunk;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.KeyValueSchema;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index 710409f..d915948 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.phoenix.coprocessor.generated.PTableProtos;
 import org.apache.phoenix.exception.ValueTypeIncompatibleException;
+import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.ExpressionType;
 import org.apache.phoenix.expression.aggregator.Aggregator;
@@ -70,7 +71,6 @@ import org.apache.phoenix.index.IndexMaintainer;
 import org.apache.phoenix.index.PhoenixIndexCodec;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.join.HashJoinInfo;
-import org.apache.phoenix.join.TupleProjector;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.query.QueryServicesOptions;
 import org.apache.phoenix.schema.ConstraintViolationException;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
new file mode 100644
index 0000000..a9347e1
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientAggregatePlan.java
@@ -0,0 +1,229 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.execute;
+
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.phoenix.compile.ExplainPlan;
+import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
+import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.coprocessor.BaseScannerRegionObserver;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.expression.OrderByExpression;
+import org.apache.phoenix.expression.aggregator.Aggregators;
+import org.apache.phoenix.expression.aggregator.ServerAggregators;
+import org.apache.phoenix.iterate.AggregatingResultIterator;
+import org.apache.phoenix.iterate.BaseGroupedAggregatingResultIterator;
+import org.apache.phoenix.iterate.DistinctAggregatingResultIterator;
+import org.apache.phoenix.iterate.FilterAggregatingResultIterator;
+import org.apache.phoenix.iterate.FilterResultIterator;
+import org.apache.phoenix.iterate.GroupedAggregatingResultIterator;
+import org.apache.phoenix.iterate.LimitingResultIterator;
+import org.apache.phoenix.iterate.LookAheadResultIterator;
+import org.apache.phoenix.iterate.OrderedAggregatingResultIterator;
+import org.apache.phoenix.iterate.OrderedResultIterator;
+import org.apache.phoenix.iterate.PeekingResultIterator;
+import org.apache.phoenix.iterate.ResultIterator;
+import org.apache.phoenix.iterate.SequenceResultIterator;
+import org.apache.phoenix.iterate.UngroupedAggregatingResultIterator;
+import org.apache.phoenix.parse.FilterableStatement;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
+import org.apache.phoenix.schema.tuple.Tuple;
+import org.apache.phoenix.util.TupleUtil;
+
+import com.google.common.collect.Lists;
+
+public class ClientAggregatePlan extends ClientProcessingPlan {
+    private final GroupBy groupBy;
+    private final Expression having;
+    private final Aggregators serverAggregators;
+    private final Aggregators clientAggregators;
+    
+    public ClientAggregatePlan(StatementContext context, FilterableStatement statement, TableRef table, RowProjector projector,
+            Integer limit, Expression where, OrderBy orderBy, GroupBy groupBy, Expression having, QueryPlan delegate) {
+        super(context, statement, table, projector, limit, where, orderBy, delegate);
+        this.groupBy = groupBy;
+        this.having = having;
+        this.serverAggregators =
+                ServerAggregators.deserialize(context.getScan()
+                        .getAttribute(BaseScannerRegionObserver.AGGREGATORS), QueryServicesOptions.withDefaults().getConfiguration());
+        this.clientAggregators = context.getAggregationManager().getAggregators();
+    }
+
+    @Override
+    public ResultIterator iterator() throws SQLException {
+        ResultIterator iterator = delegate.iterator();
+        if (where != null) {
+            iterator = new FilterResultIterator(iterator, where);
+        }
+        
+        AggregatingResultIterator aggResultIterator;
+        if (groupBy.isEmpty()) {
+            aggResultIterator = new ClientUngroupedAggregatingResultIterator(LookAheadResultIterator.wrap(iterator), serverAggregators);
+            aggResultIterator = new UngroupedAggregatingResultIterator(LookAheadResultIterator.wrap(aggResultIterator), clientAggregators);
+        } else {
+            if (!groupBy.isOrderPreserving()) {
+                int thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
+                        QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB, QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
+                List<Expression> keyExpressions = groupBy.getKeyExpressions();
+                List<OrderByExpression> keyExpressionOrderBy = Lists.newArrayListWithExpectedSize(keyExpressions.size());
+                for (Expression keyExpression : keyExpressions) {
+                    keyExpressionOrderBy.add(new OrderByExpression(keyExpression, false, true));
+                }
+                iterator = new OrderedResultIterator(iterator, keyExpressionOrderBy, thresholdBytes, limit, projector.getEstimatedRowByteSize());
+            }
+            aggResultIterator = new ClientGroupedAggregatingResultIterator(LookAheadResultIterator.wrap(iterator), serverAggregators, groupBy.getExpressions());
+            aggResultIterator = new GroupedAggregatingResultIterator(LookAheadResultIterator.wrap(aggResultIterator), clientAggregators);
+        }
+
+        if (having != null) {
+            aggResultIterator = new FilterAggregatingResultIterator(aggResultIterator, having);
+        }
+        
+        if (statement.isDistinct() && statement.isAggregate()) { // Dedup on client if select distinct and aggregation
+            aggResultIterator = new DistinctAggregatingResultIterator(aggResultIterator, getProjector());
+        }
+
+        ResultIterator resultScanner = aggResultIterator;
+        if (orderBy.getOrderByExpressions().isEmpty()) {
+            if (limit != null) {
+                resultScanner = new LimitingResultIterator(aggResultIterator, limit);
+            }
+        } else {
+            int thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
+                    QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB, QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
+            resultScanner = new OrderedAggregatingResultIterator(aggResultIterator, orderBy.getOrderByExpressions(), thresholdBytes, limit);
+        }
+        if (context.getSequenceManager().getSequenceCount() > 0) {
+            resultScanner = new SequenceResultIterator(resultScanner, context.getSequenceManager());
+        }
+        
+        return resultScanner;
+    }
+
+    @Override
+    public ExplainPlan getExplainPlan() throws SQLException {
+        List<String> planSteps = Lists.newArrayList(delegate.getExplainPlan().getPlanSteps());
+        if (where != null) {
+            planSteps.add("CLIENT FILTER BY " + where.toString());
+        }
+        if (!groupBy.isEmpty()) {
+            if (!groupBy.isOrderPreserving()) {
+                planSteps.add("CLIENT SORTED BY " + groupBy.getKeyExpressions().toString());
+            }
+            planSteps.add("CLIENT AGGREGATE INTO DISTINCT ROWS BY " + groupBy.getExpressions().toString());
+        } else {
+            planSteps.add("CLIENT AGGREGATE INTO SINGLE ROW");            
+        }
+        if (having != null) {
+            planSteps.add("CLIENT AFTER-AGGREGATION FILTER BY " + having.toString());
+        }
+        if (statement.isDistinct() && statement.isAggregate()) {
+            planSteps.add("CLIENT DISTINCT ON " + projector.toString());
+        }
+        if (orderBy.getOrderByExpressions().isEmpty()) {
+            if (limit != null) {
+                planSteps.add("CLIENT " + limit + " ROW LIMIT");
+            }
+        } else {
+            planSteps.add("CLIENT" + (limit == null ? "" : " TOP " + limit + " ROW"  + (limit == 1 ? "" : "S"))  + " SORTED BY " + orderBy.getOrderByExpressions().toString());
+        }
+        if (context.getSequenceManager().getSequenceCount() > 0) {
+            int nSequences = context.getSequenceManager().getSequenceCount();
+            planSteps.add("CLIENT RESERVE VALUES FROM " + nSequences + " SEQUENCE" + (nSequences == 1 ? "" : "S"));
+        }
+        
+        return new ExplainPlan(planSteps);
+    }
+
+    @Override
+    public GroupBy getGroupBy() {
+        return groupBy;
+    }
+    
+    private static class ClientGroupedAggregatingResultIterator extends BaseGroupedAggregatingResultIterator {
+        private final List<Expression> groupByExpressions;
+
+        public ClientGroupedAggregatingResultIterator(PeekingResultIterator iterator, Aggregators aggregators, List<Expression> groupByExpressions) {
+            super(iterator, aggregators);
+            this.groupByExpressions = groupByExpressions;
+        }
+
+        @Override
+        protected ImmutableBytesWritable getGroupingKey(Tuple tuple,
+                ImmutableBytesWritable ptr) throws SQLException {
+            try {
+                ImmutableBytesWritable key = TupleUtil.getConcatenatedValue(tuple, groupByExpressions);
+                ptr.set(key.get(), key.getOffset(), key.getLength());
+                return ptr;
+            } catch (IOException e) {
+                throw new SQLException(e);
+            }
+        }
+
+        @Override
+        protected Tuple wrapKeyValueAsResult(KeyValue keyValue) {
+            return new MultiKeyValueTuple(Collections.<Cell> singletonList(keyValue));
+        }
+
+        @Override
+        public String toString() {
+            return "ClientGroupedAggregatingResultIterator [resultIterator=" 
+                    + resultIterator + ", aggregators=" + aggregators + ", groupByExpressions="
+                    + groupByExpressions + "]";
+        }
+    }
+
+    private static class ClientUngroupedAggregatingResultIterator extends BaseGroupedAggregatingResultIterator {
+
+        public ClientUngroupedAggregatingResultIterator(PeekingResultIterator iterator, Aggregators aggregators) {
+            super(iterator, aggregators);
+        }
+
+        @Override
+        protected ImmutableBytesWritable getGroupingKey(Tuple tuple,
+                ImmutableBytesWritable ptr) throws SQLException {
+            tuple.getKey(ptr);
+            return ptr;
+        }
+
+        @Override
+        protected Tuple wrapKeyValueAsResult(KeyValue keyValue)
+                throws SQLException {
+            return new MultiKeyValueTuple(Collections.<Cell> singletonList(keyValue));
+        }
+
+        @Override
+        public String toString() {
+            return "ClientUngroupedAggregatingResultIterator [resultIterator=" 
+                    + resultIterator + ", aggregators=" + aggregators + "]";
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientProcessingPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientProcessingPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientProcessingPlan.java
new file mode 100644
index 0000000..8e787b4
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientProcessingPlan.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.execute;
+
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.parse.FilterableStatement;
+import org.apache.phoenix.schema.TableRef;
+
+/**
+ * Query plan that applies WHERE, ORDER BY and LIMIT at the client side; used
+ * for derived-table queries that cannot be flattened by SubselectRewriter.
+ */
+public abstract class ClientProcessingPlan extends DelegateQueryPlan {
+    protected final StatementContext context;
+    protected final FilterableStatement statement;
+    protected final TableRef table;
+    protected final RowProjector projector;
+    protected final Integer limit;
+    protected final Expression where;
+    protected final OrderBy orderBy;
+
+    public ClientProcessingPlan(StatementContext context, FilterableStatement statement, TableRef table, 
+            RowProjector projector, Integer limit, Expression where, OrderBy orderBy, QueryPlan delegate) {
+        super(delegate);
+        this.context = context;
+        this.statement = statement;
+        this.table = table;
+        this.projector = projector;
+        this.limit = limit;
+        this.where = where;
+        this.orderBy = orderBy;
+    }
+    
+    @Override
+    public StatementContext getContext() {
+        return context;
+    }
+
+    @Override
+    public TableRef getTableRef() {
+        return table;
+    }
+
+    @Override
+    public RowProjector getProjector() {
+        return projector;
+    }
+
+    @Override
+    public Integer getLimit() {
+        return limit;
+    }
+
+    @Override
+    public OrderBy getOrderBy() {
+        return orderBy;
+    }
+
+    @Override
+    public FilterableStatement getStatement() {
+        return statement;
+    }
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientScanPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientScanPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientScanPlan.java
new file mode 100644
index 0000000..01fbd11
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/ClientScanPlan.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.execute;
+
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.phoenix.compile.ExplainPlan;
+import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.iterate.FilterResultIterator;
+import org.apache.phoenix.iterate.LimitingResultIterator;
+import org.apache.phoenix.iterate.OrderedResultIterator;
+import org.apache.phoenix.iterate.ResultIterator;
+import org.apache.phoenix.iterate.SequenceResultIterator;
+import org.apache.phoenix.parse.FilterableStatement;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.TableRef;
+
+import com.google.common.collect.Lists;
+
+/**
+ * Client-side scan plan: wraps the delegate plan's iterator and applies, in
+ * order, client-side filtering, sorting (top-N when a limit is present),
+ * limiting, and sequence value resolution.
+ */
+public class ClientScanPlan extends ClientProcessingPlan {
+
+    public ClientScanPlan(StatementContext context,
+            FilterableStatement statement, TableRef table,
+            RowProjector projector, Integer limit, Expression where,
+            OrderBy orderBy, QueryPlan delegate) {
+        super(context, statement, table, projector, limit, where, orderBy,
+                delegate);
+    }
+
+    @Override
+    public ResultIterator iterator() throws SQLException {
+        ResultIterator iterator = delegate.iterator();
+        // Filter first so sort/limit only see qualifying rows.
+        if (where != null) {
+            iterator = new FilterResultIterator(iterator, where);
+        }
+        
+        if (!orderBy.getOrderByExpressions().isEmpty()) { // TopN
+            // Spool threshold bounds the in-memory sort buffer before spilling.
+            int thresholdBytes = context.getConnection().getQueryServices().getProps().getInt(
+                    QueryServices.SPOOL_THRESHOLD_BYTES_ATTRIB, QueryServicesOptions.DEFAULT_SPOOL_THRESHOLD_BYTES);
+            // OrderedResultIterator applies the limit itself (top-N), so no
+            // separate LimitingResultIterator is needed on this path.
+            iterator = new OrderedResultIterator(iterator, orderBy.getOrderByExpressions(), thresholdBytes, limit, projector.getEstimatedRowByteSize());
+        } else if (limit != null) {
+            iterator = new LimitingResultIterator(iterator, limit);
+        }
+        
+        // Sequences are resolved last, on the post-filter/post-limit rows.
+        if (context.getSequenceManager().getSequenceCount() > 0) {
+            iterator = new SequenceResultIterator(iterator, context.getSequenceManager());
+        }
+        
+        return iterator;
+    }
+
+    @Override
+    public ExplainPlan getExplainPlan() throws SQLException {
+        // Mirror the wrapping order of iterator() in the EXPLAIN output.
+        List<String> planSteps = Lists.newArrayList(delegate.getExplainPlan().getPlanSteps());
+        if (where != null) {
+            planSteps.add("CLIENT FILTER BY " + where.toString());
+        }
+        if (!orderBy.getOrderByExpressions().isEmpty()) {
+            planSteps.add("CLIENT" + (limit == null ? "" : " TOP " + limit + " ROW"  + (limit == 1 ? "" : "S"))  + " SORTED BY " + orderBy.getOrderByExpressions().toString());
+        } else if (limit != null) {
+            planSteps.add("CLIENT " + limit + " ROW LIMIT");
+        }
+        if (context.getSequenceManager().getSequenceCount() > 0) {
+            int nSequences = context.getSequenceManager().getSequenceCount();
+            planSteps.add("CLIENT RESERVE VALUES FROM " + nSequences + " SEQUENCE" + (nSequences == 1 ? "" : "S"));
+        }
+        
+        return new ExplainPlan(planSteps);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
index fce4245..c6ed0ac 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/HashJoinPlan.java
@@ -51,6 +51,7 @@ import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.InListExpression;
 import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.RowValueConstructorExpression;
+import org.apache.phoenix.iterate.FilterResultIterator;
 import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.job.JobManager.JobCallable;
@@ -76,7 +77,7 @@ import com.google.common.collect.Lists;
 public class HashJoinPlan extends DelegateQueryPlan {
     private static final Log LOG = LogFactory.getLog(HashJoinPlan.class);
 
-    private final FilterableStatement statement;
+    private final SelectStatement statement;
     private final HashJoinInfo joinInfo;
     private final SubPlan[] subPlans;
     private final boolean recompileWhereClause;
@@ -88,14 +89,13 @@ public class HashJoinPlan extends DelegateQueryPlan {
     private AtomicLong firstJobEndTime;
     private List<Expression> keyRangeExpressions;
     
-    public static HashJoinPlan create(FilterableStatement statement, 
+    public static HashJoinPlan create(SelectStatement statement, 
             QueryPlan plan, HashJoinInfo joinInfo, SubPlan[] subPlans) {
-        if (plan instanceof BaseQueryPlan)
+        if (!(plan instanceof HashJoinPlan))
             return new HashJoinPlan(statement, plan, joinInfo, subPlans, joinInfo == null);
         
-        assert (plan instanceof HashJoinPlan);
         HashJoinPlan hashJoinPlan = (HashJoinPlan) plan;
-        assert hashJoinPlan.joinInfo == null;
+        assert (hashJoinPlan.joinInfo == null && hashJoinPlan.delegate instanceof BaseQueryPlan);
         SubPlan[] mergedSubPlans = new SubPlan[hashJoinPlan.subPlans.length + subPlans.length];
         int i = 0;
         for (SubPlan subPlan : hashJoinPlan.subPlans) {
@@ -107,7 +107,7 @@ public class HashJoinPlan extends DelegateQueryPlan {
         return new HashJoinPlan(statement, hashJoinPlan.delegate, joinInfo, mergedSubPlans, true);
     }
     
-    private HashJoinPlan(FilterableStatement statement, 
+    private HashJoinPlan(SelectStatement statement, 
             QueryPlan plan, HashJoinInfo joinInfo, SubPlan[] subPlans, boolean recompileWhereClause) {
         super(plan);
         this.statement = statement;
@@ -170,6 +170,7 @@ public class HashJoinPlan extends DelegateQueryPlan {
             throw firstException;
         }
         
+        Expression postFilter = null;
         boolean hasKeyRangeExpressions = keyRangeExpressions != null && !keyRangeExpressions.isEmpty();
         if (recompileWhereClause || hasKeyRangeExpressions) {
             StatementContext context = delegate.getContext();
@@ -177,10 +178,10 @@ public class HashJoinPlan extends DelegateQueryPlan {
             ParseNode viewWhere = table.getViewStatement() == null ? null : new SQLParser(table.getViewStatement()).parseQuery().getWhere();
             context.setResolver(FromCompiler.getResolverForQuery((SelectStatement) (delegate.getStatement()), delegate.getContext().getConnection()));
             if (recompileWhereClause) {
-                WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere);                
+                postFilter = WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere, null);
             }
             if (hasKeyRangeExpressions) {
-                WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere, keyRangeExpressions, true);
+                WhereCompiler.compile(delegate.getContext(), delegate.getStatement(), viewWhere, keyRangeExpressions, true, null);
             }
         }
 
@@ -189,7 +190,12 @@ public class HashJoinPlan extends DelegateQueryPlan {
             HashJoinInfo.serializeHashJoinIntoScan(scan, joinInfo);
         }
         
-        return ((BaseQueryPlan) delegate).iterator(dependencies);
+        ResultIterator iterator = joinInfo == null ? delegate.iterator() : ((BaseQueryPlan) delegate).iterator(dependencies);
+        if (statement.getInnerSelectStatement() != null && postFilter != null) {
+            iterator = new FilterResultIterator(iterator, postFilter);
+        }
+        
+        return iterator;
     }
 
     private Expression createKeyRangeExpression(Expression lhsExpression,

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f004e135/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjectionPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjectionPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjectionPlan.java
index 410d386..c9cbd15 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjectionPlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjectionPlan.java
@@ -20,14 +20,12 @@ package org.apache.phoenix.execute;
 import java.sql.SQLException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.phoenix.compile.ExplainPlan;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.iterate.DelegateResultIterator;
+import org.apache.phoenix.iterate.FilterResultIterator;
 import org.apache.phoenix.iterate.ResultIterator;
-import org.apache.phoenix.join.TupleProjector;
-import org.apache.phoenix.schema.IllegalDataException;
 import org.apache.phoenix.schema.tuple.Tuple;
 
 import com.google.common.collect.Lists;
@@ -49,52 +47,33 @@ public class TupleProjectionPlan extends DelegateQueryPlan {
         if (postFilter != null) {
             planSteps.add("CLIENT FILTER BY " + postFilter.toString());
         }
-        
+
         return new ExplainPlan(planSteps);
     }
 
     @Override
     public ResultIterator iterator() throws SQLException {
-        final ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
-
-        return new DelegateResultIterator(delegate.iterator()) {
+        ResultIterator iterator = new DelegateResultIterator(delegate.iterator()) {
             
             @Override
             public Tuple next() throws SQLException {
-                Tuple tuple = null;
-                while (tuple == null) {
-                    tuple = super.next();
-                    if (tuple == null) {
-                        break;
-                    }
-                    
-                    tuple = tupleProjector.projectResults(tuple);
-                    
-                    if (postFilter != null) {
-                        postFilter.reset();
-                        try {
-                            if (postFilter.evaluate(tuple, tempPtr)) {
-                                Boolean b = (Boolean)postFilter.getDataType().toObject(tempPtr);
-                                if (!b.booleanValue()) {
-                                    tuple = null;
-                                }            
-                            } else {
-                                tuple = null;
-                            }
-                        } catch (IllegalDataException e) {
-                            tuple = null;
-                        }
-                    }
-                }
+                Tuple tuple = super.next();
+                if (tuple == null)
+                    return null;
                 
-                return tuple;
+                return tupleProjector.projectResults(tuple);
             }
 
             @Override
             public String toString() {
-                return "TupleProjectionResultIterator [projector=" + tupleProjector + ", postFilter="
-                        + postFilter + "]";
+                return "TupleProjectionResultIterator [projector=" + tupleProjector + "]";
             }            
         };
+        
+        if (postFilter != null) {
+            iterator = new FilterResultIterator(iterator, postFilter);
+        }
+        
+        return iterator;
     }
 }