You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by ma...@apache.org on 2014/11/14 21:30:51 UTC

[5/5] phoenix git commit: PHOENIX-1799 Support many-to-many joins

PHOENIX-1799 Support many-to-many joins


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/eddc846d
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/eddc846d
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/eddc846d

Branch: refs/heads/master
Commit: eddc846d85f97f3cc2206aba27a8d544c4db3384
Parents: a715a79
Author: maryannxue <ma...@apache.org>
Authored: Fri Nov 14 15:29:43 2014 -0500
Committer: maryannxue <ma...@apache.org>
Committed: Fri Nov 14 15:29:43 2014 -0500

----------------------------------------------------------------------
 .../apache/phoenix/compile/GroupByCompiler.java |   4 +-
 .../apache/phoenix/compile/JoinCompiler.java    | 147 ++++----
 .../apache/phoenix/compile/OrderByCompiler.java |  11 +-
 .../apache/phoenix/compile/QueryCompiler.java   | 191 +++++++---
 .../phoenix/compile/SubselectRewriter.java      |  23 +-
 .../apache/phoenix/compile/UpsertCompiler.java  |   5 +
 .../apache/phoenix/compile/WhereCompiler.java   |   4 +-
 .../coprocessor/HashJoinRegionScanner.java      |  17 +-
 .../apache/phoenix/execute/TupleProjector.java  |  59 +--
 .../phoenix/iterate/FilterResultIterator.java   |   3 +-
 .../iterate/MappedByteBufferSortedQueue.java    | 370 ++++---------------
 .../java/org/apache/phoenix/parse/HintNode.java |   6 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |  11 +
 .../phoenix/parse/SelectStatementRewriter.java  |   2 +-
 .../org/apache/phoenix/schema/ValueBitSet.java  |  11 +-
 15 files changed, 406 insertions(+), 458 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
index a561a47..016cd52 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/GroupByCompiler.java
@@ -136,7 +136,7 @@ public class GroupByCompiler {
      * @throws ColumnNotFoundException if column name could not be resolved
      * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
      */
-    public static GroupBy compile(StatementContext context, SelectStatement statement, TupleProjector tupleProjector) throws SQLException {
+    public static GroupBy compile(StatementContext context, SelectStatement statement, TupleProjector tupleProjector, boolean isInRowKeyOrder) throws SQLException {
         List<ParseNode> groupByNodes = statement.getGroupBy();
         /**
          * Distinct can use an aggregate plan if there's no group by.
@@ -179,7 +179,7 @@ public class GroupByCompiler {
             return GroupBy.EMPTY_GROUP_BY;
         }
         
-        boolean isRowKeyOrderedGrouping = groupByVisitor.isOrderPreserving();
+        boolean isRowKeyOrderedGrouping = isInRowKeyOrder && groupByVisitor.isOrderPreserving();
         List<Expression> expressions = Lists.newArrayListWithCapacity(groupByEntries.size());
         List<Expression> keyExpressions = expressions;
         String groupExprAttribName;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
index b519dc4..45b6603 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/JoinCompiler.java
@@ -20,7 +20,6 @@ package org.apache.phoenix.compile;
 import static org.apache.phoenix.schema.SaltingUtil.SALTING_COLUMN;
 
 import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -293,6 +292,10 @@ public class JoinCompiler {
             return columnRefs;
         }
         
+        public ParseNode getPostFiltersCombined() {
+            return combine(postFilters);
+        }
+        
         public void addFilter(ParseNode filter) throws SQLException {
             if (joinSpecs.isEmpty()) {
                 table.addFilter(filter);
@@ -320,7 +323,7 @@ public class JoinCompiler {
             for (JoinSpec joinSpec : joinSpecs) {
                 JoinTable joinTable = joinSpec.getJoinTable();
                 boolean hasSubJoin = !joinTable.getJoinSpecs().isEmpty();
-                for (ComparisonParseNode node : joinSpec.getOnConditions()) {
+                for (EqualParseNode node : joinSpec.getOnConditions()) {
                     node.getLHS().accept(generalRefVisitor);
                     if (hasSubJoin) {
                         node.getRHS().accept(generalRefVisitor);
@@ -384,13 +387,12 @@ public class JoinCompiler {
         }
         
         public SelectStatement getAsSingleSubquery(SelectStatement query, boolean asSubquery) throws SQLException {
-            if (!isFlat(query))
-                throw new SQLFeatureNotSupportedException("Complex subqueries not supported as left join table.");
+            assert (isFlat(query));
             
             if (asSubquery)
                 return query;
             
-            return NODE_FACTORY.select(query.getFrom(), select.getHint(), select.isDistinct(), select.getSelect(), query.getWhere(), select.getGroupBy(), select.getHaving(), select.getOrderBy(), select.getLimit(), select.getBindCount(), select.isAggregate(), select.hasSequence());            
+            return NODE_FACTORY.select(select, query.getFrom(), query.getWhere());            
         }
         
         public boolean hasPostReference() {
@@ -427,7 +429,7 @@ public class JoinCompiler {
     
     public static class JoinSpec {
         private final JoinType type;
-        private final List<ComparisonParseNode> onConditions;
+        private final List<EqualParseNode> onConditions;
         private final JoinTable joinTable;
         private final boolean singleValueOnly;
         private Set<TableRef> dependencies;
@@ -436,7 +438,7 @@ public class JoinCompiler {
         private JoinSpec(JoinType type, ParseNode onNode, JoinTable joinTable, 
                 boolean singleValueOnly, ColumnResolver resolver) throws SQLException {
             this.type = type;
-            this.onConditions = new ArrayList<ComparisonParseNode>();
+            this.onConditions = new ArrayList<EqualParseNode>();
             this.joinTable = joinTable;
             this.singleValueOnly = singleValueOnly;
             this.dependencies = new HashSet<TableRef>();
@@ -454,7 +456,7 @@ public class JoinCompiler {
             return type;
         }
         
-        public List<ComparisonParseNode> getOnConditions() {
+        public List<EqualParseNode> getOnConditions() {
             return onConditions;
         }
         
@@ -470,75 +472,63 @@ public class JoinCompiler {
             return dependencies;
         }
         
-        public Pair<List<Expression>, List<Expression>> compileJoinConditions(StatementContext context, ColumnResolver leftResolver, ColumnResolver rightResolver) throws SQLException {
+        public Pair<List<Expression>, List<Expression>> compileJoinConditions(StatementContext lhsCtx, StatementContext rhsCtx, boolean sortExpressions) throws SQLException {
             if (onConditions.isEmpty()) {
                 return new Pair<List<Expression>, List<Expression>>(
                         Collections.<Expression> singletonList(LiteralExpression.newConstant(1)), 
                         Collections.<Expression> singletonList(LiteralExpression.newConstant(1)));
             }
             
-            ColumnResolver resolver = context.getResolver();
-            List<Pair<Expression, Expression>> compiled = new ArrayList<Pair<Expression, Expression>>(onConditions.size());
-            context.setResolver(leftResolver);
-            ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
-            for (ParseNode condition : onConditions) {
-                assert (condition instanceof EqualParseNode);
-                EqualParseNode equalNode = (EqualParseNode) condition;
-                expressionCompiler.reset();
-                Expression left = equalNode.getLHS().accept(expressionCompiler);
-                compiled.add(new Pair<Expression, Expression>(left, null));
-            }
-            context.setResolver(rightResolver);
-            expressionCompiler = new ExpressionCompiler(context);
-            Iterator<Pair<Expression, Expression>> iter = compiled.iterator();
-            for (ParseNode condition : onConditions) {
-                Pair<Expression, Expression> p = iter.next();
-                EqualParseNode equalNode = (EqualParseNode) condition;
-                expressionCompiler.reset();
-                Expression right = equalNode.getRHS().accept(expressionCompiler);
-                Expression left = p.getFirst();
+            List<Pair<Expression, Expression>> compiled = Lists.<Pair<Expression, Expression>> newArrayListWithExpectedSize(onConditions.size());
+            ExpressionCompiler lhsCompiler = new ExpressionCompiler(lhsCtx);
+            ExpressionCompiler rhsCompiler = new ExpressionCompiler(rhsCtx);
+            for (EqualParseNode condition : onConditions) {
+                lhsCompiler.reset();
+                Expression left = condition.getLHS().accept(lhsCompiler);
+                rhsCompiler.reset();
+                Expression right = condition.getRHS().accept(rhsCompiler);
                 PDataType toType = getCommonType(left.getDataType(), right.getDataType());
                 if (left.getDataType() != toType) {
                     left = CoerceExpression.create(left, toType);
-                    p.setFirst(left);
                 }
                 if (right.getDataType() != toType) {
                     right = CoerceExpression.create(right, toType);
                 }
-                p.setSecond(right);
+                compiled.add(new Pair<Expression, Expression>(left, right));
             }
-            context.setResolver(resolver); // recover the resolver
-            Collections.sort(compiled, new Comparator<Pair<Expression, Expression>>() {
-                @Override
-                public int compare(Pair<Expression, Expression> o1, Pair<Expression, Expression> o2) {
-                    Expression e1 = o1.getFirst();
-                    Expression e2 = o2.getFirst();
-                    boolean isFixed1 = e1.getDataType().isFixedWidth();
-                    boolean isFixed2 = e2.getDataType().isFixedWidth();
-                    boolean isFixedNullable1 = e1.isNullable() &&isFixed1;
-                    boolean isFixedNullable2 = e2.isNullable() && isFixed2;
-                    if (isFixedNullable1 == isFixedNullable2) {
-                        if (isFixed1 == isFixed2) {
-                            return 0;
-                        } else if (isFixed1) {
-                            return -1;
-                        } else {
+            if (sortExpressions) {
+                Collections.sort(compiled, new Comparator<Pair<Expression, Expression>>() {
+                    @Override
+                    public int compare(Pair<Expression, Expression> o1, Pair<Expression, Expression> o2) {
+                        Expression e1 = o1.getFirst();
+                        Expression e2 = o2.getFirst();
+                        boolean isFixed1 = e1.getDataType().isFixedWidth();
+                        boolean isFixed2 = e2.getDataType().isFixedWidth();
+                        boolean isFixedNullable1 = e1.isNullable() &&isFixed1;
+                        boolean isFixedNullable2 = e2.isNullable() && isFixed2;
+                        if (isFixedNullable1 == isFixedNullable2) {
+                            if (isFixed1 == isFixed2) {
+                                return 0;
+                            } else if (isFixed1) {
+                                return -1;
+                            } else {
+                                return 1;
+                            }
+                        } else if (isFixedNullable1) {
                             return 1;
+                        } else {
+                            return -1;
                         }
-                    } else if (isFixedNullable1) {
-                        return 1;
-                    } else {
-                        return -1;
                     }
-                }
-            });
-            List<Expression> lConditions = new ArrayList<Expression>(compiled.size());
-            List<Expression> rConditions = new ArrayList<Expression>(compiled.size());
+                });
+            }
+            List<Expression> lConditions = Lists.<Expression> newArrayListWithExpectedSize(compiled.size());
+            List<Expression> rConditions = Lists.<Expression> newArrayListWithExpectedSize(compiled.size());
             for (Pair<Expression, Expression> pair : compiled) {
                 lConditions.add(pair.getFirst());
                 rConditions.add(pair.getSecond());
             }
-            
+
             return new Pair<List<Expression>, List<Expression>>(lConditions, rConditions);
         }
         
@@ -683,11 +673,11 @@ public class JoinCompiler {
             return JoinCompiler.compilePostFilterExpression(context, postFilters);
         }
         
-        public SelectStatement getAsSubquery() throws SQLException {
+        public SelectStatement getAsSubquery(List<OrderByNode> orderBy) throws SQLException {
             if (isSubselect())
-                return SubselectRewriter.applyPostFilters(subselect, preFilters, tableNode.getAlias());
+                return SubselectRewriter.applyOrderBy(SubselectRewriter.applyPostFilters(subselect, preFilters, tableNode.getAlias()), orderBy, tableNode.getAlias());
             
-            return NODE_FACTORY.select(tableNode, select.getHint(), false, selectNodes, getPreFiltersCombined(), null, null, null, null, 0, false, select.hasSequence());
+            return NODE_FACTORY.select(tableNode, select.getHint(), false, selectNodes, getPreFiltersCombined(), null, null, orderBy, null, 0, false, select.hasSequence());
         }
         
         public boolean hasFilters() {
@@ -912,12 +902,12 @@ public class JoinCompiler {
     }
     
     private static class OnNodeVisitor extends BooleanParseNodeVisitor<Void> {
-        private List<ComparisonParseNode> onConditions;
+        private List<EqualParseNode> onConditions;
         private Set<TableRef> dependencies;
         private JoinTable joinTable;
         private ColumnRefParseNodeVisitor columnRefVisitor;
         
-        public OnNodeVisitor(ColumnResolver resolver, List<ComparisonParseNode> onConditions, 
+        public OnNodeVisitor(ColumnResolver resolver, List<EqualParseNode> onConditions, 
                 Set<TableRef> dependencies, JoinTable joinTable) {
             this.onConditions = onConditions;
             this.dependencies = dependencies;
@@ -981,7 +971,7 @@ public class JoinCompiler {
                 joinTable.addFilter(node);
             } else if (lhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY
                     && rhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
-                onConditions.add(node);
+                onConditions.add((EqualParseNode) node);
                 dependencies.addAll(lhsTableRefSet);
             } else if (rhsType == ColumnRefParseNodeVisitor.ColumnRefType.FOREIGN_ONLY
                     && lhsType == ColumnRefParseNodeVisitor.ColumnRefType.SELF_ONLY) {
@@ -1069,9 +1059,9 @@ public class JoinCompiler {
         }
     }
     
-    private static String PROJECTED_TABLE_SCHEMA = ".";
+    private static final String PROJECTED_TABLE_SCHEMA = ".";
     // for creation of new statements
-    private static ParseNodeFactory NODE_FACTORY = new ParseNodeFactory();
+    private static final ParseNodeFactory NODE_FACTORY = new ParseNodeFactory();
     
     private static boolean isFlat(SelectStatement select) {
         return !select.isJoin() 
@@ -1167,7 +1157,7 @@ public class JoinCompiler {
             QueryCompiler compiler = new QueryCompiler(statement, select, resolver);
             List<Object> binds = statement.getParameters();
             StatementContext ctx = new StatementContext(statement, resolver, new Scan(), new SequenceManager(statement));
-            QueryPlan plan = compiler.compileJoinQuery(ctx, binds, join, false);
+            QueryPlan plan = compiler.compileJoinQuery(ctx, binds, join, false, false, null);
             TableRef table = plan.getTableRef();
             if (groupByTableRef != null && !groupByTableRef.equals(table)) {
                 groupByTableRef = null;
@@ -1303,17 +1293,30 @@ public class JoinCompiler {
             return new JoinedTableColumnResolver(this, origResolver);
         }
         
-        public PTableWrapper mergeProjectedTables(PTableWrapper rWrapper, boolean innerJoin) throws SQLException {
+        public PTableWrapper mergeProjectedTables(PTableWrapper rWrapper, JoinType type) throws SQLException {
             PTable left = this.getTable();
             PTable right = rWrapper.getTable();
-            List<PColumn> merged = new ArrayList<PColumn>();
-            merged.addAll(left.getColumns());
+            List<PColumn> merged = Lists.<PColumn> newArrayList();
+            if (type != JoinType.Full) {
+                merged.addAll(left.getColumns());
+            } else {
+                for (PColumn c : left.getColumns()) {
+                    if (SchemaUtil.isPKColumn(c)) {
+                        merged.add(c);
+                    } else {
+                        PColumnImpl column = new PColumnImpl(c.getName(), c.getFamilyName(), c.getDataType(), 
+                                c.getMaxLength(), c.getScale(), true, c.getPosition(), 
+                                c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced());
+                        merged.add(column);
+                    }
+                }
+            }
             int position = merged.size();
             for (PColumn c : right.getColumns()) {
                 if (!SchemaUtil.isPKColumn(c)) {
                     PColumnImpl column = new PColumnImpl(c.getName(), 
                             PNameFactory.newName(TupleProjector.VALUE_COLUMN_FAMILY), c.getDataType(), 
-                            c.getMaxLength(), c.getScale(), innerJoin ? c.isNullable() : true, position++, 
+                            c.getMaxLength(), c.getScale(), type == JoinType.Inner ? c.isNullable() : true, position++, 
                             c.getSortOrder(), c.getArraySize(), c.getViewConstant(), c.isViewReferenced());
                     merged.add(column);
                 }
@@ -1358,12 +1361,16 @@ public class JoinCompiler {
     	private JoinedTableColumnResolver(PTableWrapper table, ColumnResolver tableResolver) {
     		this.table = table;
     		this.tableResolver = tableResolver;
-            this.tableRef = new TableRef(null, table.getTable(), 0, false);
+            this.tableRef = new TableRef(ParseNodeFactory.createTempAlias(), table.getTable(), 0, false);
     	}
         
         public PTableWrapper getPTableWrapper() {
             return table;
         }
+        
+        public TableRef getTableRef() {
+            return tableRef;
+        }
 
 		@Override
 		public List<TableRef> getTables() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
index 2629846..444b05e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/OrderByCompiler.java
@@ -33,6 +33,7 @@ import org.apache.phoenix.parse.FilterableStatement;
 import org.apache.phoenix.parse.OrderByNode;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.SortOrder;
 
 import com.google.common.collect.ImmutableList;
@@ -77,7 +78,8 @@ public class OrderByCompiler {
      */
     public static OrderBy compile(StatementContext context,
                                   FilterableStatement statement,
-                                  GroupBy groupBy, Integer limit) throws SQLException {
+                                  GroupBy groupBy, Integer limit, 
+                                  boolean isInRowKeyOrder) throws SQLException {
         List<OrderByNode> orderByNodes = statement.getOrderBy();
         if (orderByNodes.isEmpty()) {
             return OrderBy.EMPTY_ORDER_BY;
@@ -115,11 +117,14 @@ public class OrderByCompiler {
             return OrderBy.EMPTY_ORDER_BY;
         }
         // If we're ordering by the order returned by the scan, we don't need an order by
-        if (visitor.isOrderPreserving()) {
+        if (isInRowKeyOrder && visitor.isOrderPreserving()) {
             if (visitor.isReverse()) {
                 // Don't use reverse scan if we're using a skip scan, as our skip scan doesn't support this yet.
+                // REV_ROW_KEY_ORDER_BY scan would not take effect for a projected table, so don't return it for such table types.
                 if (context.getConnection().getQueryServices().getProps().getBoolean(QueryServices.USE_REVERSE_SCAN_ATTRIB, QueryServicesOptions.DEFAULT_USE_REVERSE_SCAN)
-                        && !context.getScanRanges().useSkipScanFilter()) {
+                        && !context.getScanRanges().useSkipScanFilter()
+                        && context.getCurrentTable().getTable().getType() != PTableType.JOIN
+                        && context.getCurrentTable().getTable().getType() != PTableType.SUBQUERY) {
                     return OrderBy.REV_ROW_KEY_ORDER_BY;
                 }
             } else {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index 6b767e7..96baafe 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -40,6 +40,7 @@ import org.apache.phoenix.execute.HashJoinPlan;
 import org.apache.phoenix.execute.HashJoinPlan.HashSubPlan;
 import org.apache.phoenix.execute.HashJoinPlan.WhereClauseSubPlan;
 import org.apache.phoenix.execute.ScanPlan;
+import org.apache.phoenix.execute.SortMergeJoinPlan;
 import org.apache.phoenix.execute.TupleProjectionPlan;
 import org.apache.phoenix.execute.TupleProjector;
 import org.apache.phoenix.expression.Expression;
@@ -51,12 +52,17 @@ import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.join.HashJoinInfo;
+import org.apache.phoenix.parse.AliasedNode;
+import org.apache.phoenix.parse.EqualParseNode;
 import org.apache.phoenix.parse.HintNode.Hint;
 import org.apache.phoenix.parse.JoinTableNode.JoinType;
+import org.apache.phoenix.parse.OrderByNode;
 import org.apache.phoenix.parse.ParseNode;
+import org.apache.phoenix.parse.ParseNodeFactory;
 import org.apache.phoenix.parse.SQLParser;
 import org.apache.phoenix.parse.SelectStatement;
 import org.apache.phoenix.parse.SubqueryParseNode;
+import org.apache.phoenix.parse.TableNode;
 import org.apache.phoenix.query.QueryConstants;
 import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
@@ -78,6 +84,7 @@ import com.google.common.collect.Sets;
  * @since 0.1
  */
 public class QueryCompiler {
+    private static final ParseNodeFactory NODE_FACTORY = new ParseNodeFactory();
     /*
      * Not using Scan.setLoadColumnFamiliesOnDemand(true) because we don't
      * want to introduce a dependency on 0.94.5 (where this feature was
@@ -93,6 +100,7 @@ public class QueryCompiler {
     private final List<? extends PDatum> targetColumns;
     private final ParallelIteratorFactory parallelIteratorFactory;
     private final SequenceManager sequenceManager;
+    private final boolean useSortMergeJoin;
 
     public QueryCompiler(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver) throws SQLException {
         this(statement, select, resolver, Collections.<PDatum>emptyList(), null, new SequenceManager(statement));
@@ -106,6 +114,7 @@ public class QueryCompiler {
         this.targetColumns = targetColumns;
         this.parallelIteratorFactory = parallelIteratorFactory;
         this.sequenceManager = sequenceManager;
+        this.useSortMergeJoin = select.getHint().hasHint(Hint.USE_SORT_MERGE_JOIN);
         if (statement.getConnection().getQueryServices().getLowestClusterHBaseVersion() >= PhoenixDatabaseMetaData.ESSENTIAL_FAMILY_VERSION_THRESHOLD) {
             this.scan.setAttribute(LOAD_COLUMN_FAMILIES_ON_DEMAND_ATTR, QueryConstants.TRUE);
         }
@@ -138,26 +147,56 @@ public class QueryCompiler {
                 context = new StatementContext(statement, resolver, scan, sequenceManager);
             }
             JoinTable joinTable = JoinCompiler.compile(statement, select, context.getResolver());
-            return compileJoinQuery(context, binds, joinTable, false);
+            return compileJoinQuery(context, binds, joinTable, false, false, null);
         } else {
             return compileSingleQuery(context, select, binds, false, true);
         }
     }
-
+    
+    /*
+     * Call compileJoinQuery() for join queries recursively down to the leaf JoinTable nodes.
+     * This matches the input JoinTable node against patterns in the following order:
+     * 1. A (leaf JoinTable node, which can be a named table reference or a subquery of any kind.)
+     *    Returns the compilation result of a single table scan or of an independent subquery.
+     * 2. Matching either of (when hint USE_SORT_MERGE_JOIN not specified):
+     *        1) A LEFT/INNER JOIN B
+     *        2) A LEFT/INNER JOIN B (LEFT/INNER JOIN C)+, if hint NO_STAR_JOIN not specified
+     *        where A can be a named table reference or a flat subquery, and B, C, ... can be a named
+     *        table reference, a sub-join or a subquery of any kind.
+     *    Returns a HashJoinPlan{scan: A, hash: B, C, ...}.
+     * 3. Matching pattern:
+     *        A RIGHT/INNER JOIN B (when hint USE_SORT_MERGE_JOIN not specified)
+     *        where B can be a named table reference or a flat subquery, and A can be a named table
+     *        reference, a sub-join or a subquery of any kind.
+     *    Returns a HashJoinPlan{scan: B, hash: A}.
+     *    NOTE that "A LEFT/RIGHT/INNER/FULL JOIN B RIGHT/INNER JOIN C" is viewed as
+     *    "(A LEFT/RIGHT/INNER/FULL JOIN B) RIGHT/INNER JOIN C" here, which means the left part in the
+     *    parenthesis is considered a sub-join.
+     * 4. All the rest that do not qualify for previous patterns or conditions, including FULL joins.
+     *    Returns a SortMergeJoinPlan, the sorting part of which is pushed down to the JoinTable nodes
+     *    of both sides as order-by clauses.
+     * NOTE that SEMI or ANTI joins are treated the same way as LEFT joins in JoinTable pattern matching.
+     *    
+     * If no join algorithm hint is provided, then according to the above compilation process, a join query 
+     * plan may consist of both HashJoinPlan and SortMergeJoinPlan, which may enclose each other.
+     * TODO 1) Use table statistics to guide the choice of join plans.
+     *      2) Make it possible to hint a certain join algorithm for a specific join step.
+     */
     @SuppressWarnings("unchecked")
-    protected QueryPlan compileJoinQuery(StatementContext context, List<Object> binds, JoinTable joinTable, boolean asSubquery) throws SQLException {
+    protected QueryPlan compileJoinQuery(StatementContext context, List<Object> binds, JoinTable joinTable, boolean asSubquery, boolean projectPKColumns, List<OrderByNode> orderBy) throws SQLException {
         byte[] emptyByteArray = new byte[0];
         List<JoinSpec> joinSpecs = joinTable.getJoinSpecs();
         if (joinSpecs.isEmpty()) {
             Table table = joinTable.getTable();
-            SelectStatement subquery = table.getAsSubquery();
+            SelectStatement subquery = table.getAsSubquery(orderBy);
             if (!table.isSubselect()) {
-                ProjectedPTableWrapper projectedTable = table.createProjectedTable(!asSubquery);
+                ProjectedPTableWrapper projectedTable = table.createProjectedTable(!projectPKColumns);
                 TupleProjector.serializeProjectorIntoScan(context.getScan(), projectedTable.createTupleProjector());
                 context.setCurrentTable(table.getTableRef());
                 context.setResolver(projectedTable.createColumnResolver());
                 table.projectColumns(context.getScan());
-                return compileSingleQuery(context, subquery, binds, asSubquery, true);
+                return compileSingleQuery(context, subquery, binds, asSubquery, !asSubquery);
             }
             QueryPlan plan = compileSubquery(subquery);
             ProjectedPTableWrapper projectedTable = table.createProjectedTable(plan.getProjector());
@@ -165,25 +204,26 @@ public class QueryCompiler {
             return new TupleProjectionPlan(plan, projectedTable.createTupleProjector(), table.compilePostFilterExpression(context));
         }
 
-        boolean[] starJoinVector = joinTable.getStarJoinVector();
-        if (starJoinVector != null) {
+        boolean[] starJoinVector;
+        if (!this.useSortMergeJoin && (starJoinVector = joinTable.getStarJoinVector()) != null) {
             Table table = joinTable.getTable();
             ProjectedPTableWrapper initialProjectedTable;
             TableRef tableRef;
             SelectStatement query;
             if (!table.isSubselect()) {
-                initialProjectedTable = table.createProjectedTable(!asSubquery);
+                initialProjectedTable = table.createProjectedTable(!projectPKColumns);
                 tableRef = table.getTableRef();
                 table.projectColumns(context.getScan());
-                query = joinTable.getAsSingleSubquery(table.getAsSubquery(), asSubquery);
+                query = joinTable.getAsSingleSubquery(table.getAsSubquery(orderBy), asSubquery);
             } else {
-                SelectStatement subquery = table.getAsSubquery();
+                SelectStatement subquery = table.getAsSubquery(orderBy);
                 QueryPlan plan = compileSubquery(subquery);
                 initialProjectedTable = table.createProjectedTable(plan.getProjector());
                 tableRef = plan.getTableRef();
                 context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
                 query = joinTable.getAsSingleSubquery((SelectStatement) plan.getStatement(), asSubquery);
             }
+            context.setCurrentTable(tableRef);
             PTableWrapper projectedTable = initialProjectedTable;
             int count = joinSpecs.size();
             ImmutableBytesPtr[] joinIds = new ImmutableBytesPtr[count];
@@ -199,13 +239,12 @@ public class QueryCompiler {
                 JoinSpec joinSpec = joinSpecs.get(i);
                 Scan subScan = ScanUtil.newScan(originalScan);
                 StatementContext subContext = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
-                QueryPlan joinPlan = compileJoinQuery(subContext, binds, joinSpec.getJoinTable(), true);
-                ColumnResolver resolver = subContext.getResolver();
+                QueryPlan joinPlan = compileJoinQuery(subContext, binds, joinSpec.getJoinTable(), true, true, null);
                 boolean hasPostReference = joinSpec.getJoinTable().hasPostReference();
                 if (hasPostReference) {
-                    PTableWrapper subProjTable = ((JoinedTableColumnResolver) (resolver)).getPTableWrapper();
+                    PTableWrapper subProjTable = ((JoinedTableColumnResolver) subContext.getResolver()).getPTableWrapper();
                     tables[i] = subProjTable.getTable();
-                    projectedTable = projectedTable.mergeProjectedTables(subProjTable, joinSpec.getType() == JoinType.Inner);
+                    projectedTable = projectedTable.mergeProjectedTables(subProjTable, joinSpec.getType());
                     needsProject = true;
                 } else {
                     tables[i] = null;
@@ -213,13 +252,13 @@ public class QueryCompiler {
                 if (!starJoinVector[i]) {
                     needsProject = true;
                 }
-                ColumnResolver leftResolver = (!forceProjection && starJoinVector[i]) ? joinTable.getOriginalResolver() : projectedTable.createColumnResolver();
+                context.setResolver((!forceProjection && starJoinVector[i]) ? joinTable.getOriginalResolver() : projectedTable.createColumnResolver());
                 joinIds[i] = new ImmutableBytesPtr(emptyByteArray); // place-holder
-                Pair<List<Expression>, List<Expression>> joinConditions = joinSpec.compileJoinConditions(context, leftResolver, resolver);
+                Pair<List<Expression>, List<Expression>> joinConditions = joinSpec.compileJoinConditions(context, subContext, true);
                 joinExpressions[i] = joinConditions.getFirst();
                 List<Expression> hashExpressions = joinConditions.getSecond();
                 Pair<Expression, Expression> keyRangeExpressions = new Pair<Expression, Expression>(null, null);
-                boolean complete = getKeyExpressionCombinations(keyRangeExpressions, context, tableRef, joinSpec.getType(), joinExpressions[i], hashExpressions);
+                boolean complete = getKeyExpressionCombinations(keyRangeExpressions, context, joinTable.getStatement(), tableRef, joinSpec.getType(), joinExpressions[i], hashExpressions);
                 Expression keyRangeLhsExpression = keyRangeExpressions.getFirst();
                 Expression keyRangeRhsExpression = keyRangeExpressions.getSecond();
                 boolean hasFilters = joinSpec.getJoinTable().hasFilters();
@@ -233,9 +272,8 @@ public class QueryCompiler {
             if (needsProject) {
                 TupleProjector.serializeProjectorIntoScan(context.getScan(), initialProjectedTable.createTupleProjector());
             }
-            context.setCurrentTable(tableRef);
             context.setResolver(needsProject ? projectedTable.createColumnResolver() : joinTable.getOriginalResolver());
-            QueryPlan plan = compileSingleQuery(context, query, binds, asSubquery, joinTable.isAllLeftJoin());
+            QueryPlan plan = compileSingleQuery(context, query, binds, asSubquery, !asSubquery && joinTable.isAllLeftJoin());
             Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, table);
             Integer limit = null;
             if (query.getLimit() != null && !query.isAggregate() && !query.isDistinct() && query.getOrderBy().isEmpty()) {
@@ -247,65 +285,114 @@ public class QueryCompiler {
 
         JoinSpec lastJoinSpec = joinSpecs.get(joinSpecs.size() - 1);
         JoinType type = lastJoinSpec.getType();
-        if (type == JoinType.Full)
-            throw new SQLFeatureNotSupportedException(type + " joins not supported.");
-
-        if (type == JoinType.Right || type == JoinType.Inner) {
-            if (!lastJoinSpec.getJoinTable().getJoinSpecs().isEmpty())
-                throw new SQLFeatureNotSupportedException("Right join followed by sub-join is not supported.");
-
+        if (!this.useSortMergeJoin 
+                && (type == JoinType.Right || type == JoinType.Inner) 
+                && lastJoinSpec.getJoinTable().getJoinSpecs().isEmpty()
+                && lastJoinSpec.getJoinTable().getTable().isFlat()) {
             JoinTable rhsJoinTable = lastJoinSpec.getJoinTable();
             Table rhsTable = rhsJoinTable.getTable();
             JoinTable lhsJoin = joinTable.getSubJoinTableWithoutPostFilters();
             Scan subScan = ScanUtil.newScan(originalScan);
             StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), subScan, new SequenceManager(statement));
-            QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true);
-            ColumnResolver lhsResolver = lhsCtx.getResolver();
-            PTableWrapper lhsProjTable = ((JoinedTableColumnResolver) (lhsResolver)).getPTableWrapper();
+            QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, true, null);
+            PTableWrapper lhsProjTable = ((JoinedTableColumnResolver) lhsCtx.getResolver()).getPTableWrapper();
             ProjectedPTableWrapper rhsProjTable;
             TableRef rhsTableRef;
             SelectStatement rhs;
             if (!rhsTable.isSubselect()) {
-                rhsProjTable = rhsTable.createProjectedTable(!asSubquery);
+                rhsProjTable = rhsTable.createProjectedTable(!projectPKColumns);
                 rhsTableRef = rhsTable.getTableRef();
                 rhsTable.projectColumns(context.getScan());
-                rhs = rhsJoinTable.getAsSingleSubquery(rhsTable.getAsSubquery(), asSubquery);
+                rhs = rhsJoinTable.getAsSingleSubquery(rhsTable.getAsSubquery(orderBy), asSubquery);
             } else {
-                SelectStatement subquery = rhsTable.getAsSubquery();
+                SelectStatement subquery = rhsTable.getAsSubquery(orderBy);
                 QueryPlan plan = compileSubquery(subquery);
                 rhsProjTable = rhsTable.createProjectedTable(plan.getProjector());
                 rhsTableRef = plan.getTableRef();
                 context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
                 rhs = rhsJoinTable.getAsSingleSubquery((SelectStatement) plan.getStatement(), asSubquery);
             }
+            context.setCurrentTable(rhsTableRef);
             boolean forceProjection = rhsTable.isSubselect();
-            ColumnResolver rhsResolver = forceProjection ? rhsProjTable.createColumnResolver() : joinTable.getOriginalResolver();
+            context.setResolver(forceProjection ? rhsProjTable.createColumnResolver() : joinTable.getOriginalResolver());
             ImmutableBytesPtr[] joinIds = new ImmutableBytesPtr[] {new ImmutableBytesPtr(emptyByteArray)};
-            Pair<List<Expression>, List<Expression>> joinConditions = lastJoinSpec.compileJoinConditions(context, lhsResolver, rhsResolver);
+            Pair<List<Expression>, List<Expression>> joinConditions = lastJoinSpec.compileJoinConditions(lhsCtx, context, true);
             List<Expression> joinExpressions = joinConditions.getSecond();
             List<Expression> hashExpressions = joinConditions.getFirst();
-            int fieldPosition = rhsProjTable.getTable().getColumns().size() - rhsProjTable.getTable().getPKColumns().size();
-            PTableWrapper projectedTable = rhsProjTable.mergeProjectedTables(lhsProjTable, type == JoinType.Inner);
-            TupleProjector.serializeProjectorIntoScan(context.getScan(), rhsProjTable.createTupleProjector());
-            context.setCurrentTable(rhsTableRef);
-            context.setResolver(projectedTable.createColumnResolver());
-            QueryPlan rhsPlan = compileSingleQuery(context, rhs, binds, asSubquery, type == JoinType.Right);
+            boolean needsMerge = lhsJoin.hasPostReference();
+            boolean needsProject = forceProjection || asSubquery || needsMerge;
+            PTable lhsTable = needsMerge ? lhsProjTable.getTable() : null;
+            int fieldPosition = needsMerge ? rhsProjTable.getTable().getColumns().size() - rhsProjTable.getTable().getPKColumns().size() : 0;
+            PTableWrapper projectedTable = needsMerge ? rhsProjTable.mergeProjectedTables(lhsProjTable, type == JoinType.Right ? JoinType.Left : type) : rhsProjTable;
+            if (needsProject) {
+                TupleProjector.serializeProjectorIntoScan(context.getScan(), rhsProjTable.createTupleProjector());
+            }
+            context.setResolver(needsProject ? projectedTable.createColumnResolver() : joinTable.getOriginalResolver());
+            QueryPlan rhsPlan = compileSingleQuery(context, rhs, binds, asSubquery, !asSubquery && type == JoinType.Right);
             Expression postJoinFilterExpression = joinTable.compilePostFilterExpression(context, rhsTable);
             Integer limit = null;
             if (rhs.getLimit() != null && !rhs.isAggregate() && !rhs.isDistinct() && rhs.getOrderBy().isEmpty()) {
                 limit = LimitCompiler.compile(context, rhs);
             }
-            HashJoinInfo joinInfo = new HashJoinInfo(projectedTable.getTable(), joinIds, new List[] {joinExpressions}, new JoinType[] {type == JoinType.Right ? JoinType.Left : type}, new boolean[] {true}, new PTable[] {lhsProjTable.getTable()}, new int[] {fieldPosition}, postJoinFilterExpression, limit, forceProjection);
+            HashJoinInfo joinInfo = new HashJoinInfo(projectedTable.getTable(), joinIds, new List[] {joinExpressions}, new JoinType[] {type == JoinType.Right ? JoinType.Left : type}, new boolean[] {true}, new PTable[] {lhsTable}, new int[] {fieldPosition}, postJoinFilterExpression, limit, forceProjection);
             Pair<Expression, Expression> keyRangeExpressions = new Pair<Expression, Expression>(null, null);
-            getKeyExpressionCombinations(keyRangeExpressions, context, rhsTableRef, type, joinExpressions, hashExpressions);
+            getKeyExpressionCombinations(keyRangeExpressions, context, joinTable.getStatement(), rhsTableRef, type, joinExpressions, hashExpressions);
             return HashJoinPlan.create(joinTable.getStatement(), rhsPlan, joinInfo, new HashSubPlan[] {new HashSubPlan(0, lhsPlan, hashExpressions, false, keyRangeExpressions.getFirst(), keyRangeExpressions.getSecond(), lhsJoin.hasFilters())});
         }
 
-        // Do not support queries like "A right join B left join C" with hash-joins.
-        throw new SQLFeatureNotSupportedException("Joins with pattern 'A right join B left join C' not supported.");
+        JoinTable lhsJoin = joinTable.getSubJoinTableWithoutPostFilters();
+        JoinTable rhsJoin = lastJoinSpec.getJoinTable();        
+        if (type == JoinType.Right) {
+            JoinTable temp = lhsJoin;
+            lhsJoin = rhsJoin;
+            rhsJoin = temp;
+        }
+        
+        List<EqualParseNode> joinConditionNodes = lastJoinSpec.getOnConditions();
+        List<OrderByNode> lhsOrderBy = Lists.<OrderByNode> newArrayListWithExpectedSize(joinConditionNodes.size());
+        List<OrderByNode> rhsOrderBy = Lists.<OrderByNode> newArrayListWithExpectedSize(joinConditionNodes.size());
+        for (EqualParseNode condition : joinConditionNodes) {
+            lhsOrderBy.add(NODE_FACTORY.orderBy(type == JoinType.Right ? condition.getRHS() : condition.getLHS(), false, true));
+            rhsOrderBy.add(NODE_FACTORY.orderBy(type == JoinType.Right ? condition.getLHS() : condition.getRHS(), false, true));
+        }
+        
+        Scan lhsScan = ScanUtil.newScan(originalScan);
+        StatementContext lhsCtx = new StatementContext(statement, context.getResolver(), lhsScan, new SequenceManager(statement));
+        boolean preserveRowkey = !projectPKColumns && type != JoinType.Full;
+        QueryPlan lhsPlan = compileJoinQuery(lhsCtx, binds, lhsJoin, true, !preserveRowkey, lhsOrderBy);
+        PTableWrapper lhsProjTable = ((JoinedTableColumnResolver) lhsCtx.getResolver()).getPTableWrapper();
+        boolean isInRowKeyOrder = preserveRowkey && lhsPlan.getOrderBy().getOrderByExpressions().isEmpty();
+        
+        Scan rhsScan = ScanUtil.newScan(originalScan);
+        StatementContext rhsCtx = new StatementContext(statement, context.getResolver(), rhsScan, new SequenceManager(statement));
+        QueryPlan rhsPlan = compileJoinQuery(rhsCtx, binds, rhsJoin, true, true, rhsOrderBy);
+        PTableWrapper rhsProjTable = ((JoinedTableColumnResolver) rhsCtx.getResolver()).getPTableWrapper();
+        
+        Pair<List<Expression>, List<Expression>> joinConditions = lastJoinSpec.compileJoinConditions(type == JoinType.Right ? rhsCtx : lhsCtx, type == JoinType.Right ? lhsCtx : rhsCtx, false);
+        List<Expression> lhsKeyExpressions = type == JoinType.Right ? joinConditions.getSecond() : joinConditions.getFirst();
+        List<Expression> rhsKeyExpressions = type == JoinType.Right ? joinConditions.getFirst() : joinConditions.getSecond();
+        
+        boolean needsMerge = rhsJoin.hasPostReference();
+        PTable rhsTable = needsMerge ? rhsProjTable.getTable() : null;
+        int fieldPosition = needsMerge ? lhsProjTable.getTable().getColumns().size() - lhsProjTable.getTable().getPKColumns().size() : 0;
+        PTableWrapper projectedTable = needsMerge ? lhsProjTable.mergeProjectedTables(rhsProjTable, type == JoinType.Right ? JoinType.Left : type) : lhsProjTable;
+
+        ColumnResolver resolver = projectedTable.createColumnResolver();
+        TableRef tableRef = ((JoinedTableColumnResolver) resolver).getTableRef();
+        StatementContext subCtx = new StatementContext(statement, resolver, ScanUtil.newScan(originalScan), new SequenceManager(statement));
+        subCtx.setCurrentTable(tableRef);
+        QueryPlan innerPlan = new SortMergeJoinPlan(subCtx, joinTable.getStatement(), tableRef, type == JoinType.Right ? JoinType.Left : type, lhsPlan, rhsPlan, lhsKeyExpressions, rhsKeyExpressions, projectedTable.getTable(), lhsProjTable.getTable(), rhsTable, fieldPosition);
+        context.setCurrentTable(tableRef);
+        context.setResolver(resolver);
+        TableNode from = NODE_FACTORY.namedTable(tableRef.getTableAlias(), NODE_FACTORY.table(tableRef.getTable().getSchemaName().getString(), tableRef.getTable().getTableName().getString()));
+        ParseNode where = joinTable.getPostFiltersCombined();
+        SelectStatement select = asSubquery ? NODE_FACTORY.select(from, joinTable.getStatement().getHint(), false, Collections.<AliasedNode> emptyList(), where, null, null, orderBy, null, 0, false, joinTable.getStatement().hasSequence())
+                : NODE_FACTORY.select(joinTable.getStatement(), from, where);
+        
+        return compileSingleFlatQuery(context, select, binds, asSubquery, false, innerPlan, null, isInRowKeyOrder);
     }
 
-    private boolean getKeyExpressionCombinations(Pair<Expression, Expression> combination, StatementContext context, TableRef table, JoinType type, final List<Expression> joinExpressions, final List<Expression> hashExpressions) throws SQLException {
+    private boolean getKeyExpressionCombinations(Pair<Expression, Expression> combination, StatementContext context, SelectStatement select, TableRef table, JoinType type, final List<Expression> joinExpressions, final List<Expression> hashExpressions) throws SQLException {
         if (type != JoinType.Inner && type != JoinType.Semi)
             return false;
 
@@ -313,7 +400,7 @@ public class QueryCompiler {
         StatementContext contextCopy = new StatementContext(statement, context.getResolver(), scanCopy, new SequenceManager(statement));
         contextCopy.setCurrentTable(table);
         List<Expression> lhsCombination = Lists.<Expression> newArrayList();
-        boolean complete = WhereOptimizer.getKeyExpressionCombination(lhsCombination, contextCopy, this.select, joinExpressions);
+        boolean complete = WhereOptimizer.getKeyExpressionCombination(lhsCombination, contextCopy, select, joinExpressions);
         if (lhsCombination.isEmpty())
             return false;
 
@@ -355,7 +442,7 @@ public class QueryCompiler {
     protected QueryPlan compileSingleQuery(StatementContext context, SelectStatement select, List<Object> binds, boolean asSubquery, boolean allowPageFilter) throws SQLException{
         SelectStatement innerSelect = select.getInnerSelectStatement();
         if (innerSelect == null) {
-            return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, null, null);
+            return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, null, null, true);
         }
 
         QueryPlan innerPlan = compileSubquery(innerSelect);
@@ -369,10 +456,10 @@ public class QueryCompiler {
         tableRef = resolver.getTables().get(0);
         context.setCurrentTable(tableRef);
 
-        return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, innerPlan, innerPlan.getOrderBy().getOrderByExpressions().isEmpty() ? tupleProjector : null);
+        return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, innerPlan, tupleProjector, innerPlan.getOrderBy().getOrderByExpressions().isEmpty());
     }
 
-    protected QueryPlan compileSingleFlatQuery(StatementContext context, SelectStatement select, List<Object> binds, boolean asSubquery, boolean allowPageFilter, QueryPlan innerPlan, TupleProjector innerPlanTupleProjector) throws SQLException{
+    protected QueryPlan compileSingleFlatQuery(StatementContext context, SelectStatement select, List<Object> binds, boolean asSubquery, boolean allowPageFilter, QueryPlan innerPlan, TupleProjector innerPlanTupleProjector, boolean isInRowKeyOrder) throws SQLException{
         PhoenixConnection connection = statement.getConnection();
         ColumnResolver resolver = context.getResolver();
         TableRef tableRef = context.getCurrentTable();
@@ -384,7 +471,7 @@ public class QueryCompiler {
         }
         Integer limit = LimitCompiler.compile(context, select);
 
-        GroupBy groupBy = GroupByCompiler.compile(context, select, innerPlanTupleProjector);
+        GroupBy groupBy = GroupByCompiler.compile(context, select, innerPlanTupleProjector, isInRowKeyOrder);
         // Optimize the HAVING clause by finding any group by expressions that can be moved
         // to the WHERE clause
         select = HavingCompiler.rewrite(context, select, groupBy);
@@ -397,7 +484,7 @@ public class QueryCompiler {
         Set<SubqueryParseNode> subqueries = Sets.<SubqueryParseNode> newHashSet();
         Expression where = WhereCompiler.compile(context, select, viewWhere, subqueries);
         context.setResolver(resolver); // recover resolver
-        OrderBy orderBy = OrderByCompiler.compile(context, select, groupBy, limit);
+        OrderBy orderBy = OrderByCompiler.compile(context, select, groupBy, limit, isInRowKeyOrder); 
         RowProjector projector = ProjectionCompiler.compile(context, select, groupBy, asSubquery ? Collections.<PDatum>emptyList() : targetColumns);
 
         // Final step is to build the query plan

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
index d229478..805894f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
@@ -58,6 +58,13 @@ public class SubselectRewriter extends ParseNodeRewriter {
         return statement.getLimit() == null && (!statement.isAggregate() || !statement.getGroupBy().isEmpty());        
     }
     
+    public static SelectStatement applyOrderBy(SelectStatement statement, List<OrderByNode> orderBy, String subqueryAlias) throws SQLException {
+        if (orderBy == null)
+            return statement;
+        
+        return new SubselectRewriter(null, statement.getSelect(), subqueryAlias).applyOrderBy(statement, orderBy);
+    }
+    
     public static SelectStatement flatten(SelectStatement select, PhoenixConnection connection) throws SQLException {
         TableNode from = select.getFrom();
         while (from != null && from instanceof DerivedTableNode) {
@@ -209,16 +216,24 @@ public class SubselectRewriter extends ParseNodeRewriter {
             if (where != null) {
                 postFiltersRewrite.add(where);
             }
-            return NODE_FACTORY.select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), combine(postFiltersRewrite), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), statement.getLimit(),
-                    statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+            return NODE_FACTORY.select(statement, combine(postFiltersRewrite));
         }
         
         ParseNode having = statement.getHaving();
         if (having != null) {
             postFiltersRewrite.add(having);
         }
-        return NODE_FACTORY.select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), statement.getWhere(), statement.getGroupBy(), combine(postFiltersRewrite), statement.getOrderBy(), statement.getLimit(),
-                statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+        return NODE_FACTORY.select(statement, statement.getWhere(), combine(postFiltersRewrite));
+    }
+    
+    private SelectStatement applyOrderBy(SelectStatement statement, List<OrderByNode> orderBy) throws SQLException {
+        List<OrderByNode> orderByRewrite = Lists.<OrderByNode> newArrayListWithExpectedSize(orderBy.size());
+        for (OrderByNode orderByNode : orderBy) {
+            ParseNode node = orderByNode.getNode();
+            orderByRewrite.add(NODE_FACTORY.orderBy(node.accept(this), orderByNode.isNullsLast(), orderByNode.isAscending()));
+        }
+        
+        return NODE_FACTORY.select(statement, orderByRewrite);
     }
     
     @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
index 0be40b8..796f368 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UpsertCompiler.java
@@ -419,6 +419,11 @@ public class UpsertCompiler {
                     try {
                         QueryCompiler compiler = new QueryCompiler(statement, select, selectResolver, targetColumns, parallelIteratorFactoryToBe, new SequenceManager(statement));
                         queryPlanToBe = compiler.compile();
+                        // This is post-fix: if the tableRef is a projected table, this means there are post-processing 
+                        // steps and parallelIteratorFactory did not take effect.
+                        if (queryPlanToBe.getTableRef().getTable().getType() == PTableType.JOIN || queryPlanToBe.getTableRef().getTable().getType() == PTableType.SUBQUERY) {
+                            parallelIteratorFactoryToBe = null;
+                        }
                     } catch (MetaDataEntityNotFoundException e) {
                         retryOnce = false; // don't retry if select clause has meta data entities that aren't found, as we already updated the cache
                         throw e;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
index 2c49fed..471ee37 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/WhereCompiler.java
@@ -140,7 +140,9 @@ public class WhereCompiler {
             expression = AndExpression.create(filters);
         }
         
-        expression = WhereOptimizer.pushKeyExpressionsToScan(context, statement, expression, extractedNodes);
+        if (context.getCurrentTable().getTable().getType() != PTableType.JOIN && context.getCurrentTable().getTable().getType() != PTableType.SUBQUERY) {
+            expression = WhereOptimizer.pushKeyExpressionsToScan(context, statement, expression, extractedNodes);
+        }
         setScanFilter(context, statement, expression, whereCompiler.disambiguateWithFamily, hashJoinOptimization);
 
         return expression;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
index 724122d..176520e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/HashJoinRegionScanner.java
@@ -154,7 +154,7 @@ public class HashJoinRegionScanner implements RegionScanner {
                 for (int i = 0; i < count; i++) {
                     boolean earlyEvaluation = joinInfo.earlyEvaluation()[i];
                     JoinType type = joinInfo.getJoinTypes()[i];
-                    if (earlyEvaluation && (tempTuples[i] == null || type == JoinType.Semi))
+                    if (earlyEvaluation && (type == JoinType.Semi || type == JoinType.Anti))
                         continue;
                     int j = resultQueue.size();
                     while (j-- > 0) {
@@ -163,12 +163,23 @@ public class HashJoinRegionScanner implements RegionScanner {
                             ImmutableBytesPtr key = TupleUtil.getConcatenatedValue(lhs, joinInfo.getJoinExpressions()[i]);
                             tempTuples[i] = hashCaches[i].get(key);                        	
                             if (tempTuples[i] == null) {
-                                if (type != JoinType.Inner && type != JoinType.Semi) {
+                                if (type == JoinType.Inner || type == JoinType.Semi) {
+                                    continue;
+                                } else if (type == JoinType.Anti) {
                                     resultQueue.offer(lhs);
+                                    continue;
                                 }
-                                continue;
                             }
                         }
+                        if (tempTuples[i] == null) {
+                            Tuple joined = tempSrcBitSet[i] == ValueBitSet.EMPTY_VALUE_BITSET ?
+                                    lhs : TupleProjector.mergeProjectedValue(
+                                            (ProjectedValueTuple) lhs, schema, tempDestBitSet,
+                                            null, joinInfo.getSchemas()[i], tempSrcBitSet[i], 
+                                            joinInfo.getFieldPositions()[i]);
+                            resultQueue.offer(joined);
+                            continue;
+                        }
                         for (Tuple t : tempTuples[i]) {
                             Tuple joined = tempSrcBitSet[i] == ValueBitSet.EMPTY_VALUE_BITSET ?
                                     lhs : TupleProjector.mergeProjectedValue(

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
index 346a9fd..77682e4 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/TupleProjector.java
@@ -152,14 +152,21 @@ public class TupleProjector {
     public static class ProjectedValueTuple extends BaseTuple {
         private ImmutableBytesWritable keyPtr = new ImmutableBytesWritable();
         private long timestamp;
-        private byte[] projectedValue;
+        private ImmutableBytesWritable projectedValue = new ImmutableBytesWritable();
         private int bitSetLen;
         private KeyValue keyValue;
 
-        private ProjectedValueTuple(byte[] keyBuffer, int keyOffset, int keyLength, long timestamp, byte[] projectedValue, int bitSetLen) {
+        public ProjectedValueTuple(Tuple keyBase, long timestamp, byte[] projectedValue, int valueOffset, int valueLength, int bitSetLen) {
+            keyBase.getKey(this.keyPtr);
+            this.timestamp = timestamp;
+            this.projectedValue.set(projectedValue, valueOffset, valueLength);
+            this.bitSetLen = bitSetLen;
+        }
+
+        public ProjectedValueTuple(byte[] keyBuffer, int keyOffset, int keyLength, long timestamp, byte[] projectedValue, int valueOffset, int valueLength, int bitSetLen) {
             this.keyPtr.set(keyBuffer, keyOffset, keyLength);
             this.timestamp = timestamp;
-            this.projectedValue = projectedValue;
+            this.projectedValue.set(projectedValue, valueOffset, valueLength);
             this.bitSetLen = bitSetLen;
         }
         
@@ -171,7 +178,7 @@ public class TupleProjector {
             return timestamp;
         }
         
-        public byte[] getProjectedValue() {
+        public ImmutableBytesWritable getProjectedValue() {
             return projectedValue;
         }
         
@@ -196,7 +203,7 @@ public class TupleProjector {
         public KeyValue getValue(byte[] family, byte[] qualifier) {
             if (keyValue == null) {
                 keyValue = KeyValueUtil.newKeyValue(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), 
-                        VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp, projectedValue, 0, projectedValue.length);
+                        VALUE_COLUMN_FAMILY, VALUE_COLUMN_QUALIFIER, timestamp, projectedValue.get(), projectedValue.getOffset(), projectedValue.getLength());
             }
             return keyValue;
         }
@@ -204,7 +211,7 @@ public class TupleProjector {
         @Override
         public boolean getValue(byte[] family, byte[] qualifier,
                 ImmutableBytesWritable ptr) {
-            ptr.set(projectedValue);
+            ptr.set(projectedValue.get(), projectedValue.getOffset(), projectedValue.getLength());
             return true;
         }
 
@@ -222,7 +229,7 @@ public class TupleProjector {
     public ProjectedValueTuple projectResults(Tuple tuple) {
     	byte[] bytesValue = schema.toBytes(tuple, getExpressions(), valueSet, ptr);
     	Cell base = tuple.getValue(0);
-        return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), base.getTimestamp(), bytesValue, valueSet.getEstimatedLength());
+        return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), base.getTimestamp(), bytesValue, 0, bytesValue.length, valueSet.getEstimatedLength());
     }
     
     public static void decodeProjectedValue(Tuple tuple, ImmutableBytesWritable ptr) throws IOException {
@@ -233,27 +240,33 @@ public class TupleProjector {
     
     public static ProjectedValueTuple mergeProjectedValue(ProjectedValueTuple dest, KeyValueSchema destSchema, ValueBitSet destBitSet,
     		Tuple src, KeyValueSchema srcSchema, ValueBitSet srcBitSet, int offset) throws IOException {
-    	ImmutableBytesWritable destValue = new ImmutableBytesWritable(dest.getProjectedValue());
+    	ImmutableBytesWritable destValue = dest.getProjectedValue();
+        int origDestBitSetLen = dest.getBitSetLength();
     	destBitSet.clear();
-    	destBitSet.or(destValue);
-    	int origDestBitSetLen = dest.getBitSetLength();
-    	ImmutableBytesWritable srcValue = new ImmutableBytesWritable();
-    	decodeProjectedValue(src, srcValue);
-    	srcBitSet.clear();
-    	srcBitSet.or(srcValue);
-    	int origSrcBitSetLen = srcBitSet.getEstimatedLength();
-    	for (int i = 0; i < srcBitSet.getMaxSetBit(); i++) {
-    		if (srcBitSet.get(i)) {
-    			destBitSet.set(offset + i);
-    		}
+    	destBitSet.or(destValue, origDestBitSetLen);
+    	ImmutableBytesWritable srcValue = null;
+    	int srcValueLen = 0;
+    	if (src != null) {
+    	    srcValue = new ImmutableBytesWritable();
+    	    decodeProjectedValue(src, srcValue);
+    	    srcBitSet.clear();
+    	    srcBitSet.or(srcValue);
+    	    int origSrcBitSetLen = srcBitSet.getEstimatedLength();
+    	    for (int i = 0; i <= srcBitSet.getMaxSetBit(); i++) {
+    	        if (srcBitSet.get(i)) {
+    	            destBitSet.set(offset + i);
+    	        }
+    	    }
+    	    srcValueLen = srcValue.getLength() - origSrcBitSetLen;
     	}
     	int destBitSetLen = destBitSet.getEstimatedLength();
-    	byte[] merged = new byte[destValue.getLength() - origDestBitSetLen + srcValue.getLength() - origSrcBitSetLen + destBitSetLen];
+    	byte[] merged = new byte[destValue.getLength() - origDestBitSetLen + srcValueLen + destBitSetLen];
     	int o = Bytes.putBytes(merged, 0, destValue.get(), destValue.getOffset(), destValue.getLength() - origDestBitSetLen);
-    	o = Bytes.putBytes(merged, o, srcValue.get(), srcValue.getOffset(), srcValue.getLength() - origSrcBitSetLen);
+    	if (src != null) {
+    	    o = Bytes.putBytes(merged, o, srcValue.get(), srcValue.getOffset(), srcValueLen);
+    	}
     	destBitSet.toBytes(merged, o);
-    	ImmutableBytesWritable keyPtr = dest.getKeyPtr();
-        return new ProjectedValueTuple(keyPtr.get(), keyPtr.getOffset(), keyPtr.getLength(), dest.getTimestamp(), merged, destBitSetLen);
+        return new ProjectedValueTuple(dest, dest.getTimestamp(), merged, 0, merged.length, destBitSetLen);
     }
 
     public KeyValueSchema getSchema() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/iterate/FilterResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/FilterResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/FilterResultIterator.java
index 0c68a20..6c332fa 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/FilterResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/FilterResultIterator.java
@@ -59,7 +59,8 @@ public class FilterResultIterator  extends LookAheadResultIterator {
         Tuple next;
         do {
             next = delegate.next();
-        } while (next != null && expression.evaluate(next, ptr) && Boolean.FALSE.equals(expression.getDataType().toObject(ptr)));
+            expression.reset();
+        } while (next != null && (!expression.evaluate(next, ptr) || Boolean.FALSE.equals(expression.getDataType().toObject(ptr))));
         return next;
     }
     

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/iterate/MappedByteBufferSortedQueue.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/MappedByteBufferSortedQueue.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/MappedByteBufferSortedQueue.java
index 85c91a1..03a360e 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/MappedByteBufferSortedQueue.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/MappedByteBufferSortedQueue.java
@@ -17,19 +17,12 @@
  */
 package org.apache.phoenix.iterate;
 
-import java.io.File;
 import java.io.IOException;
-import java.io.RandomAccessFile;
 import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
-import java.util.AbstractQueue;
 import java.util.ArrayList;
 import java.util.Comparator;
-import java.util.Iterator;
 import java.util.List;
-import java.util.UUID;
-
+import java.util.Queue;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -41,182 +34,104 @@ import org.apache.phoenix.util.ResultUtil;
 
 import com.google.common.collect.MinMaxPriorityQueue;
 
-public class MappedByteBufferSortedQueue extends AbstractQueue<ResultEntry> {
+public class MappedByteBufferSortedQueue extends MappedByteBufferQueue<ResultEntry> {
     private Comparator<ResultEntry> comparator;
     private final int limit;
-    private final int thresholdBytes;
-    private List<MappedByteBufferPriorityQueue> queues = new ArrayList<MappedByteBufferPriorityQueue>();
-    private MappedByteBufferPriorityQueue currentQueue = null;
-    private int currentIndex = 0;
-    MinMaxPriorityQueue<IndexedResultEntry> mergedQueue = null;
 
     public MappedByteBufferSortedQueue(Comparator<ResultEntry> comparator,
             Integer limit, int thresholdBytes) throws IOException {
+        super(thresholdBytes);
         this.comparator = comparator;
         this.limit = limit == null ? -1 : limit;
-        this.thresholdBytes = thresholdBytes;
-        this.currentQueue = new MappedByteBufferPriorityQueue(0,
-                this.limit, thresholdBytes, comparator);
-        this.queues.add(currentQueue);
-    }
-
-    @Override
-    public boolean offer(ResultEntry e) {
-        try {
-            boolean isFlush = this.currentQueue.writeResult(e);
-            if (isFlush) {
-                currentIndex++;
-                currentQueue = new MappedByteBufferPriorityQueue(currentIndex,
-                        limit, thresholdBytes, comparator);
-                queues.add(currentQueue);
-            }
-        } catch (IOException ioe) {
-            throw new RuntimeException(ioe);
-        }
-
-        return true;
-    }
-
-    @Override
-    public ResultEntry poll() {
-        if (mergedQueue == null) {
-            mergedQueue = MinMaxPriorityQueue.<ResultEntry> orderedBy(
-                    comparator).maximumSize(queues.size()).create();
-            for (MappedByteBufferPriorityQueue queue : queues) {
-                try {
-                    IndexedResultEntry next = queue.getNextResult();
-                    if (next != null) {
-                        mergedQueue.add(next);
-                    }
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-        if (!mergedQueue.isEmpty()) {
-            IndexedResultEntry re = mergedQueue.pollFirst();
-            if (re != null) {
-                IndexedResultEntry next = null;
-                try {
-                    next = queues.get(re.getIndex()).getNextResult();
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-                if (next != null) {
-                    mergedQueue.add(next);
-                }
-                return re;
-            }
-        }
-        return null;
     }
 
     @Override
-    public ResultEntry peek() {
-        if (mergedQueue == null) {
-            mergedQueue = MinMaxPriorityQueue.<ResultEntry> orderedBy(
-                    comparator).maximumSize(queues.size()).create();
-            for (MappedByteBufferPriorityQueue queue : queues) {
-                try {
-                    IndexedResultEntry next = queue.getNextResult();
-                    if (next != null) {
-                        mergedQueue.add(next);
-                    }
-                } catch (IOException e) {
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-        if (!mergedQueue.isEmpty()) {
-            IndexedResultEntry re = mergedQueue.peekFirst();
-            if (re != null) {
-                return re;
-            }
-        }
-        return null;
+    protected org.apache.phoenix.iterate.MappedByteBufferQueue.MappedByteBufferSegmentQueue<ResultEntry> createSegmentQueue(
+            int index, int thresholdBytes) {
+        return new MappedByteBufferResultEntryPriorityQueue(index, thresholdBytes, limit, comparator);
     }
 
     @Override
-    public Iterator<ResultEntry> iterator() {
-        throw new UnsupportedOperationException();
+    protected Comparator<org.apache.phoenix.iterate.MappedByteBufferQueue.MappedByteBufferSegmentQueue<ResultEntry>> getSegmentQueueComparator() {
+        return new Comparator<MappedByteBufferSegmentQueue<ResultEntry>>() {
+            @Override
+            public int compare(MappedByteBufferSegmentQueue<ResultEntry> q1,
+                    MappedByteBufferSegmentQueue<ResultEntry> q2) {
+                return comparator.compare(q1.peek(), q2.peek());
+            }};
     }
 
-    @Override
-    public int size() {
-        int size = 0;
-        for (MappedByteBufferPriorityQueue queue : queues) {
-            size += queue.size();
+    private static class MappedByteBufferResultEntryPriorityQueue extends MappedByteBufferSegmentQueue<ResultEntry> {    	
+        private MinMaxPriorityQueue<ResultEntry> results = null;
+        
+    	public MappedByteBufferResultEntryPriorityQueue(int index,
+                int thresholdBytes, int limit, Comparator<ResultEntry> comparator) {
+            super(index, thresholdBytes, limit >= 0);
+            this.results = limit < 0 ? 
+                    MinMaxPriorityQueue.<ResultEntry> orderedBy(comparator).create()
+                  : MinMaxPriorityQueue.<ResultEntry> orderedBy(comparator).maximumSize(limit).create();
         }
-        return size;
-    }
-    
-    public long getByteSize() {
-        return currentQueue.getInMemByteSize();
-    }
 
-    public void close() {
-        if (queues != null) {
-            for (MappedByteBufferPriorityQueue queue : queues) {
-                queue.close();
-            }
+        @Override
+        protected Queue<ResultEntry> getInMemoryQueue() {
+            return results;
         }
-    }
-
-    private static class IndexedResultEntry extends ResultEntry {
-        private int index;
 
-        public IndexedResultEntry(int index, ResultEntry resultEntry) {
-            super(resultEntry.sortKeys, resultEntry.result);
-            this.index = index;
+        @Override
+        protected int sizeOf(ResultEntry e) {
+            return sizeof(e.sortKeys) + sizeof(toKeyValues(e));
         }
 
-        public int getIndex() {
-            return this.index;
+        @Override
+        protected void writeToBuffer(MappedByteBuffer buffer, ResultEntry e) {
+            int totalLen = 0;
+            List<KeyValue> keyValues = toKeyValues(e);
+            for (KeyValue kv : keyValues) {
+                totalLen += (kv.getLength() + Bytes.SIZEOF_INT);
+            }
+            buffer.putInt(totalLen);
+            for (KeyValue kv : keyValues) {
+                buffer.putInt(kv.getLength());
+                buffer.put(kv.getBuffer(), kv.getOffset(), kv
+                        .getLength());
+            }
+            ImmutableBytesWritable[] sortKeys = e.sortKeys;
+            buffer.putInt(sortKeys.length);
+            for (ImmutableBytesWritable sortKey : sortKeys) {
+                if (sortKey != null) {
+                    buffer.putInt(sortKey.getLength());
+                    buffer.put(sortKey.get(), sortKey.getOffset(),
+                            sortKey.getLength());
+                } else {
+                    buffer.putInt(0);
+                }
+            }
         }
-    }
 
-    private static class MappedByteBufferPriorityQueue {
-    	// at least create 128 KB MappedByteBuffers
-        private static final long DEFAULT_MAPPING_SIZE = 128 * 1024;
-        
-        private final int limit;
-        private final int thresholdBytes;
-        private long totalResultSize = 0;
-        private int maxResultSize = 0;
-        private long mappingSize = 0;
-        private long writeIndex = 0;
-        private long readIndex = 0;
-        private MappedByteBuffer writeBuffer;
-        private MappedByteBuffer readBuffer;
-        private FileChannel fc;
-        private RandomAccessFile af;
-        private File file;
-        private boolean isClosed = false;
-        MinMaxPriorityQueue<ResultEntry> results = null;
-        private boolean flushBuffer = false;
-        private int index;
-        private int flushedCount;
-
-        public MappedByteBufferPriorityQueue(int index, int limit, int thresholdBytes,
-                Comparator<ResultEntry> comparator) throws IOException {
-            this.index = index;
-            this.limit = limit;
-            this.thresholdBytes = thresholdBytes;
-            results = limit < 0 ? 
-                    MinMaxPriorityQueue.<ResultEntry> orderedBy(comparator).create()
-                  : MinMaxPriorityQueue.<ResultEntry> orderedBy(comparator).maximumSize(limit).create();
-        }
-        
-        public int size() {
-            if (flushBuffer)
-                return flushedCount;
-            return results.size();
-        }
-        
-        public long getInMemByteSize() {
-            if (flushBuffer)
-                return 0;
-            return totalResultSize;
+        @Override
+        protected ResultEntry readFromBuffer(MappedByteBuffer buffer) {            
+            int length = buffer.getInt();
+            if (length < 0)
+                return null;
+            
+            byte[] rb = new byte[length];
+            buffer.get(rb);
+            Result result = ResultUtil.toResult(new ImmutableBytesWritable(rb));
+            ResultTuple rt = new ResultTuple(result);
+            int sortKeySize = buffer.getInt();
+            ImmutableBytesWritable[] sortKeys = new ImmutableBytesWritable[sortKeySize];
+            for (int i = 0; i < sortKeySize; i++) {
+                int contentLength = buffer.getInt();
+                if (contentLength > 0) {
+                    byte[] sortKeyContent = new byte[contentLength];
+                    buffer.get(sortKeyContent);
+                    sortKeys[i] = new ImmutableBytesWritable(sortKeyContent);
+                } else {
+                    sortKeys[i] = null;
+                }
+            }
+            
+            return new ResultEntry(sortKeys, rt);
         }
 
         private List<KeyValue> toKeyValues(ResultEntry entry) {
@@ -252,138 +167,5 @@ public class MappedByteBufferSortedQueue extends AbstractQueue<ResultEntry> {
             }
             return size;
         }
-
-        public boolean writeResult(ResultEntry entry) throws IOException {
-            if (flushBuffer)
-                throw new IOException("Results already flushed");
-            
-            int sortKeySize = sizeof(entry.sortKeys);
-            int resultSize = sizeof(toKeyValues(entry)) + sortKeySize;
-            boolean added = results.add(entry);
-            if (added) {
-                maxResultSize = Math.max(maxResultSize, resultSize);
-                totalResultSize = limit < 0 ? (totalResultSize + resultSize) : maxResultSize * results.size();
-                if (totalResultSize >= thresholdBytes) {
-                    this.file = File.createTempFile(UUID.randomUUID().toString(), null);
-                    this.af = new RandomAccessFile(file, "rw");
-                    this.fc = af.getChannel();
-                    mappingSize = Math.min(Math.max(maxResultSize, DEFAULT_MAPPING_SIZE), totalResultSize);
-                    writeBuffer = fc.map(MapMode.READ_WRITE, writeIndex, mappingSize);
-                
-                    int resSize = results.size();
-                    for (int i = 0; i < resSize; i++) {                
-                        int totalLen = 0;
-                        ResultEntry re = results.pollFirst();
-                        List<KeyValue> keyValues = toKeyValues(re);
-                        for (KeyValue kv : keyValues) {
-                            totalLen += (kv.getLength() + Bytes.SIZEOF_INT);
-                        }
-                        writeBuffer.putInt(totalLen);
-                        for (KeyValue kv : keyValues) {
-                            writeBuffer.putInt(kv.getLength());
-                            writeBuffer.put(kv.getBuffer(), kv.getOffset(), kv
-                                    .getLength());
-                        }
-                        ImmutableBytesWritable[] sortKeys = re.sortKeys;
-                        writeBuffer.putInt(sortKeys.length);
-                        for (ImmutableBytesWritable sortKey : sortKeys) {
-                            if (sortKey != null) {
-                                writeBuffer.putInt(sortKey.getLength());
-                                writeBuffer.put(sortKey.get(), sortKey.getOffset(),
-                                        sortKey.getLength());
-                            } else {
-                                writeBuffer.putInt(0);
-                            }
-                        }
-                        // buffer close to exhausted, re-map.
-                        if (mappingSize - writeBuffer.position() < maxResultSize) {
-                            writeIndex += writeBuffer.position();
-                            writeBuffer = fc.map(MapMode.READ_WRITE, writeIndex, mappingSize);
-                        }
-                    }
-                    writeBuffer.putInt(-1); // end
-                    flushedCount = results.size();
-                    results.clear();
-                    flushBuffer = true;
-                }
-            }
-            return flushBuffer;
-        }
-
-        public IndexedResultEntry getNextResult() throws IOException {
-            if (isClosed)
-                return null;
-            
-            if (!flushBuffer) {
-                ResultEntry re = results.poll();
-                if (re == null) {
-                    reachedEnd();
-                    return null;
-                }
-                return new IndexedResultEntry(index, re);
-            }
-            
-            if (readBuffer == null) {
-                readBuffer = this.fc.map(MapMode.READ_ONLY, readIndex, mappingSize);
-            }
-            
-            int length = readBuffer.getInt();
-            if (length < 0) {
-                reachedEnd();
-                return null;
-            }
-            
-            byte[] rb = new byte[length];
-            readBuffer.get(rb);
-            Result result = ResultUtil.toResult(new ImmutableBytesWritable(rb));
-            ResultTuple rt = new ResultTuple(result);
-            int sortKeySize = readBuffer.getInt();
-            ImmutableBytesWritable[] sortKeys = new ImmutableBytesWritable[sortKeySize];
-            for (int i = 0; i < sortKeySize; i++) {
-                int contentLength = readBuffer.getInt();
-                if (contentLength > 0) {
-                    byte[] sortKeyContent = new byte[contentLength];
-                    readBuffer.get(sortKeyContent);
-                    sortKeys[i] = new ImmutableBytesWritable(sortKeyContent);
-                } else {
-                    sortKeys[i] = null;
-                }
-            }
-            // buffer close to exhausted, re-map.
-            if (mappingSize - readBuffer.position() < maxResultSize) {
-                readIndex += readBuffer.position();
-                readBuffer = fc.map(MapMode.READ_ONLY, readIndex, mappingSize);
-            }
-            
-            return new IndexedResultEntry(index, new ResultEntry(sortKeys, rt));
-        }
-
-        private void reachedEnd() {
-            this.isClosed = true;
-            if (this.fc != null) {
-                try {
-                    this.fc.close();
-                } catch (IOException ignored) {
-                }
-                this.fc = null;
-            }
-            if (this.af != null) {
-                try {
-                    this.af.close();
-                } catch (IOException ignored) {
-                }
-                this.af = null;
-            }
-            if (this.file != null) {
-                file.delete();
-                file = null;
-            }
-        }
-
-        public void close() {
-            if (!isClosed) {
-                this.reachedEnd();
-            }
-        }
     }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/parse/HintNode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/HintNode.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/HintNode.java
index ea20114..068547f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/HintNode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/HintNode.java
@@ -82,7 +82,11 @@ public class HintNode {
         */
        NO_CACHE,
        /**
-        * Avoid using star-join optimization.
+        * Use sort-merge join algorithm instead of broadcast join (hash join) algorithm.
+        */
+       USE_SORT_MERGE_JOIN,
+       /**
+        * Avoid using star-join optimization. Used for broadcast join (hash join) only.
         */
        NO_STAR_JOIN,
        /**

http://git-wip-us.apache.org/repos/asf/phoenix/blob/eddc846d/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
index cc0b455..6d3123f 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
@@ -590,6 +590,11 @@ public class ParseNodeFactory {
         return new DeleteStatement(table, hint, node, orderBy, limit, bindCount);
     }
 
+    public SelectStatement select(SelectStatement statement, ParseNode where) {
+        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(), statement.getHaving(),
+                statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+    }
+
     public SelectStatement select(SelectStatement statement, ParseNode where, ParseNode having) {
         return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(), where, statement.getGroupBy(), having,
                 statement.getOrderBy(), statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
@@ -625,6 +630,12 @@ public class ParseNodeFactory {
                 statement.hasSequence());
     }
 
+    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy) {
+        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
+                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, statement.getLimit(),
+                statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+    }
+
     public SelectStatement select(SelectStatement statement, HintNode hint) {
         return hint == null || hint.isEmpty() ? statement : select(statement.getFrom(), hint, statement.isDistinct(), statement.getSelect(),
                 statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), statement.getLimit(),