You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by ma...@apache.org on 2015/04/06 16:46:51 UTC

phoenix git commit: PHOENIX-1580 Support UNION ALL

Repository: phoenix
Updated Branches:
  refs/heads/master c823be992 -> c50feca25


PHOENIX-1580 Support UNION ALL


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c50feca2
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c50feca2
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c50feca2

Branch: refs/heads/master
Commit: c50feca254f4c8ae2505d83f738a6ab9d92a9fd9
Parents: c823be9
Author: maryannxue <we...@intel.com>
Authored: Mon Apr 6 10:46:37 2015 -0400
Committer: maryannxue <we...@intel.com>
Committed: Mon Apr 6 10:46:37 2015 -0400

----------------------------------------------------------------------
 .../org/apache/phoenix/end2end/UnionAllIT.java  | 579 +++++++++++++++++++
 phoenix-core/src/main/antlr3/PhoenixSQL.g       |  48 +-
 .../apache/phoenix/compile/FromCompiler.java    |   4 +-
 .../apache/phoenix/compile/QueryCompiler.java   |  65 ++-
 .../phoenix/compile/StatementNormalizer.java    |   2 +-
 .../phoenix/compile/SubselectRewriter.java      |   5 +-
 .../apache/phoenix/compile/UnionCompiler.java   |  86 +++
 .../phoenix/exception/SQLExceptionCode.java     |   6 +
 .../apache/phoenix/execute/AggregatePlan.java   |   1 +
 .../org/apache/phoenix/execute/UnionPlan.java   | 190 ++++++
 .../iterate/MergeSortTopNResultIterator.java    |   9 +-
 .../phoenix/iterate/UnionResultIterators.java   | 109 ++++
 .../apache/phoenix/jdbc/PhoenixStatement.java   |  25 +-
 .../apache/phoenix/parse/ParseNodeFactory.java  |  39 +-
 .../apache/phoenix/parse/ParseNodeRewriter.java |   5 +-
 .../apache/phoenix/parse/SelectStatement.java   |  30 +-
 .../apache/phoenix/parse/QueryParserTest.java   |  13 -
 17 files changed, 1147 insertions(+), 69 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/it/java/org/apache/phoenix/end2end/UnionAllIT.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/UnionAllIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UnionAllIT.java
new file mode 100644
index 0000000..b3b2f7d
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/UnionAllIT.java
@@ -0,0 +1,579 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Statement;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+
+public class UnionAllIT extends BaseOwnClusterHBaseManagedTimeIT {
+
+    @BeforeClass
+    public static void doSetup() throws Exception {
+        Map<String, String> props = Collections.emptyMap();
+        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+    }
+
+    @Test
+    public void testUnionAllSelects() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "b");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            stmt.setString(1, "c");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "select * from test_table union all select * from b_table union all select * from test_table";
+            ResultSet rs = conn.createStatement().executeQuery(ddl);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("b",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("c",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next());  
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testAggregate() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            stmt.setString(1, "d");
+            stmt.setInt(2, 40);
+            stmt.execute();
+            stmt.setString(1, "e");
+            stmt.setInt(2, 50);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "b");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            stmt.setString(1, "c");
+            stmt.setInt(2, 30);
+            stmt.execute();
+            conn.commit();
+
+            String aggregate = "select count(*) from test_table union all select count(*) from b_table union all select count(*) from test_table";
+            ResultSet rs = conn.createStatement().executeQuery(aggregate);
+            assertTrue(rs.next());
+            assertEquals(3,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(2,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(3,rs.getInt(1));
+            assertFalse(rs.next());  
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testGroupBy() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "b");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            stmt.setString(1, "c");
+            stmt.setInt(2, 30);
+            stmt.execute();
+            conn.commit();
+
+            String aggregate = "select count(*), col1 from test_table group by col1 union all select count(*), col1 from b_table group by col1";
+            ResultSet rs = conn.createStatement().executeQuery(aggregate);
+            assertTrue(rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(1,rs.getInt(1)); 
+            assertFalse(rs.next());  
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testOrderByLimit() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table1 " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table1 VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            stmt.setString(1, "f");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table1 " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            dml = "UPSERT INTO b_table1 VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "b");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            stmt.setString(1, "c");
+            stmt.setInt(2, 30);
+            stmt.execute();
+            stmt.setString(1, "d");
+            stmt.setInt(2, 30);
+            stmt.execute();
+            stmt.setString(1, "e");
+            stmt.setInt(2, 30);
+            stmt.execute();
+            conn.commit();
+
+            String aggregate = "select count(*), col1 from b_table1 group by col1 union all select count(*), col1 from test_table1 group by col1 order by col1";
+            ResultSet rs = conn.createStatement().executeQuery(aggregate);
+            assertTrue(rs.next());
+            assertEquals(2,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(3,rs.getInt(1));  
+            assertFalse(rs.next());  
+
+            String limit = "select count(*), col1 x from test_table1 group by col1 union all select count(*), col1 x from b_table1 group by col1 order by x limit 2";
+            rs = conn.createStatement().executeQuery(limit);
+            assertTrue(rs.next());
+            assertEquals(2,rs.getInt(1));
+            assertTrue(rs.next());
+            assertEquals(1,rs.getInt(1));
+            assertFalse(rs.next());  
+
+            String limitOnly = "select * from test_table1 union all select * from b_table1 limit 2";
+            rs = conn.createStatement().executeQuery(limitOnly);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("f",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next());  
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testSelectDiff() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "select a_string, col1, col1 from test_table union all select * from b_table union all select a_string, col1 from test_table";
+            conn.createStatement().executeQuery(ddl);
+            fail();
+        }  catch (SQLException e) {
+            assertEquals(SQLExceptionCode.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS.getErrorCode(), e.getErrorCode());
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testJoinInUnionAll() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "select x.a_string, y.col1  from test_table x, b_table y where x.a_string=y.a_string union all " +
+                    "select t.a_string, s.col1 from test_table s, b_table t where s.a_string=t.a_string"; 
+            ResultSet rs = conn.createStatement().executeQuery(ddl);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next()); 
+
+            ddl = "select x.a_string, y.col1  from test_table x join b_table y on x.a_string=y.a_string union all " +
+                    "select t.a_string, s.col1 from test_table s inner join b_table t on s.a_string=t.a_string"; 
+            rs = conn.createStatement().executeQuery(ddl);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next()); 
+
+            ddl = "select x.a_string, y.col1  from test_table x left join b_table y on x.a_string=y.a_string union all " +
+                    "select t.a_string, s.col1 from test_table s inner join b_table t on s.a_string=t.a_string union all " +
+                    "select y.a_string, x.col1 from b_table x right join test_table y on x.a_string=y.a_string";
+            rs = conn.createStatement().executeQuery(ddl);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2)); 
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(20,rs.getInt(2)); 
+            assertFalse(rs.next()); 
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testDerivedTable() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "select * from (select x.a_string, y.col1  from test_table x, b_table y where x.a_string=y.a_string) union all " +
+                    "select * from (select t.a_string, s.col1 from test_table s, b_table t where s.a_string=t.a_string)"; 
+            ResultSet rs = conn.createStatement().executeQuery(ddl);
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next()); 
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testUnionAllInSubquery() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "select a_string, col1 from test_table where a_string in (select a_string from test_table union all select a_string from b_table)";
+            conn.createStatement().executeQuery(ddl);
+        }  catch (SQLFeatureNotSupportedException e) {
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testUnionAllInSubqueryDerived() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "select a_string, col1 from test_table where a_string in (select a_string from  " +
+                    "(select * from test_table union all select * from b_table))";
+            conn.createStatement().executeQuery(ddl);
+        }  catch (SQLException e) { 
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testUnionAllWithBindParam() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            String dml = "UPSERT INTO test_table VALUES(?, ?)";
+            PreparedStatement stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "a");
+            stmt.setInt(2, 10);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+            dml = "UPSERT INTO b_table VALUES(?, ?)";
+            stmt = conn.prepareStatement(dml);
+            stmt.setString(1, "b");
+            stmt.setInt(2, 20);
+            stmt.execute();
+            conn.commit();
+
+            ddl = "select a_string, col1 from b_table where col1=? union all select a_string, col1 from test_table where col1=? ";
+            stmt = conn.prepareStatement(ddl);
+            stmt.setInt(1, 20);
+            stmt.setInt(2, 10);
+            ResultSet rs = stmt.executeQuery();
+            assertTrue(rs.next());
+            assertEquals("b",rs.getString(1));
+            assertEquals(20,rs.getInt(2));
+            assertTrue(rs.next());
+            assertEquals("a",rs.getString(1));
+            assertEquals(10,rs.getInt(2));
+            assertFalse(rs.next()); 
+        } finally {
+            conn.close();
+        }
+    }
+
+    @Test
+    public void testExplainUnionAll() throws Exception {
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        Connection conn = DriverManager.getConnection(getUrl(), props);
+        conn.setAutoCommit(false);
+
+        try {
+            String ddl = "CREATE TABLE test_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "CREATE TABLE b_table " +
+                    "  (a_string varchar not null, col1 integer" +
+                    "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
+            createTestTable(getUrl(), ddl);
+
+            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table order by col1 limit 1";
+            ResultSet rs = conn.createStatement().executeQuery(ddl);
+            assertEquals(
+                    "UNION ALL OVER 2 QUERIES\n" +
+                    "    CLIENT PARALLEL 1-WAY FULL SCAN OVER TEST_TABLE\n" + 
+                    "        SERVER TOP 1 ROW SORTED BY [COL1]\n" + 
+                    "    CLIENT MERGE SORT\n" + 
+                    "    CLIENT PARALLEL 1-WAY FULL SCAN OVER B_TABLE\n" + 
+                    "        SERVER TOP 1 ROW SORTED BY [COL1]\n" + 
+                    "    CLIENT MERGE SORT\n" + 
+                    "CLIENT MERGE SORT", QueryUtil.getExplainPlan(rs)); 
+            
+            String limitPlan = 
+                    "UNION ALL OVER 2 QUERIES\n" + 
+                    "    CLIENT SERIAL 1-WAY FULL SCAN OVER TEST_TABLE\n" + 
+                    "        SERVER 2 ROW LIMIT\n" + 
+                    "    CLIENT 2 ROW LIMIT\n" + 
+                    "    CLIENT SERIAL 1-WAY FULL SCAN OVER B_TABLE\n" + 
+                    "        SERVER 2 ROW LIMIT\n" + 
+                    "    CLIENT 2 ROW LIMIT\n" + 
+                    "CLIENT 2 ROW LIMIT";
+            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table";
+            rs = conn.createStatement().executeQuery(ddl + " limit 2");
+            assertEquals(limitPlan, QueryUtil.getExplainPlan(rs));
+            Statement stmt = conn.createStatement();
+            stmt.setMaxRows(2);
+            rs = stmt.executeQuery(ddl);
+            assertEquals(limitPlan, QueryUtil.getExplainPlan(rs));
+            
+            ddl = "explain select a_string, col1 from test_table union all select a_string, col1 from b_table";
+            rs = conn.createStatement().executeQuery(ddl);
+            assertEquals(
+                    "UNION ALL OVER 2 QUERIES\n" + 
+                    "    CLIENT PARALLEL 1-WAY FULL SCAN OVER TEST_TABLE\n" + 
+                    "    CLIENT PARALLEL 1-WAY FULL SCAN OVER B_TABLE", QueryUtil.getExplainPlan(rs)); 
+        } finally {
+            conn.close();
+        }
+    } 
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/antlr3/PhoenixSQL.g
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
index 61d5afa..03ec9f5 100644
--- a/phoenix-core/src/main/antlr3/PhoenixSQL.g
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -113,6 +113,7 @@ tokens
     TRACE='trace';
     ASYNC='async';
     SAMPLING='sampling';
+    UNION='union';
 }
 
 
@@ -351,19 +352,14 @@ statement returns [BindableStatement ret]
 
 // Parses a select statement which must be the only statement (expects an EOF after the statement).
 query returns [SelectStatement ret]
-    :   SELECT s=hinted_select_node EOF {$ret=s;}
+    :   s=select_node EOF {$ret=s;}
     ;
 
 // Parses a single SQL statement (expects an EOF after the select statement).
 oneStatement returns [BindableStatement ret]
-    :   (SELECT s=hinted_select_node {$ret=s;} 
-    |    ns=non_select_node {$ret=ns;}
-        )
-    ;
-
-non_select_node returns [BindableStatement ret]
 @init{ contextStack.push(new ParseContext()); }
-    :  (s=upsert_node
+    :  (s=select_node
+    |	s=upsert_node
     |   s=delete_node
     |   s=create_table_node
     |   s=create_view_node
@@ -578,40 +574,42 @@ dyn_column_name_or_def returns [ColumnDef ret]
             SortOrder.getDefault()); }
     ;
 
-select_expression returns [SelectStatement ret]
-    :  SELECT s=select_node {$ret = s;}
-    ;
-    
 subquery_expression returns [ParseNode ret]
-    :  s=select_expression {$ret = factory.subquery(s, false);}
+    :  s=select_node {$ret = factory.subquery(s, false);}
     ;
     
-// Parse a full select expression structure.
-select_node returns [SelectStatement ret]
+single_select returns [SelectStatement ret]
 @init{ contextStack.push(new ParseContext()); }
-    :   (d=DISTINCT | ALL)? sel=select_list
+    :   SELECT (h=hintClause)? 
+        (d=DISTINCT | ALL)? sel=select_list
         FROM from=parseFrom
         (WHERE where=expression)?
         (GROUP BY group=group_by)?
         (HAVING having=expression)?
-        (ORDER BY order=order_by)?
-        (LIMIT l=limit)?
-        { ParseContext context = contextStack.peek(); $ret = factory.select(from, null, d!=null, sel, where, group, having, order, l, getBindCount(), context.isAggregate(), context.hasSequences()); }
+        { ParseContext context = contextStack.peek(); $ret = factory.select(from, h, d!=null, sel, where, group, having, null, null, getBindCount(), context.isAggregate(), context.hasSequences()); }
     ;
 finally{ contextStack.pop(); }
 
+unioned_selects returns [List<SelectStatement> ret]
+@init{ret = new ArrayList<SelectStatement>();}
+    :   s=single_select {ret.add(s);} (UNION ALL s=single_select {ret.add(s);})*
+    ;
+    
 // Parse a full select expression structure.
-hinted_select_node returns [SelectStatement ret]
-    :   (hint=hintClause)? 
-        s=select_node
-        { $ret = factory.select(s, hint); }
+select_node returns [SelectStatement ret]
+@init{ contextStack.push(new ParseContext()); }
+    :   u=unioned_selects
+        (ORDER BY order=order_by)?
+        (LIMIT l=limit)?
+        { ParseContext context = contextStack.peek(); $ret = factory.select(u, order, l, getBindCount(), context.isAggregate()); }
     ;
+finally{ contextStack.pop(); }
 
 // Parse a full upsert expression structure.
 upsert_node returns [UpsertStatement ret]
     :   UPSERT (hint=hintClause)? INTO t=from_table_name
         (LPAREN p=upsert_column_refs RPAREN)?
-        ((VALUES LPAREN v=one_or_more_expressions RPAREN) | s=select_expression)
+        ((VALUES LPAREN v=one_or_more_expressions RPAREN) | s=select_node)
         {ret = factory.upsert(factory.namedTable(null,t,p == null ? null : p.getFirst()), hint, p == null ? null : p.getSecond(), v, s, getBindCount()); }
     ;
 
@@ -697,7 +695,7 @@ table_factor returns [TableNode ret]
     :   LPAREN t=table_list RPAREN { $ret = t; }
     |   n=bind_name ((AS)? alias=identifier)? { $ret = factory.bindTable(alias, factory.table(null,n)); } // TODO: review
     |   f=from_table_name ((AS)? alias=identifier)? (LPAREN cdefs=dyn_column_defs RPAREN)? { $ret = factory.namedTable(alias,f,cdefs); }
-    |   LPAREN SELECT s=hinted_select_node RPAREN ((AS)? alias=identifier)? { $ret = factory.derivedTable(alias, s); }
+    |   LPAREN s=select_node RPAREN ((AS)? alias=identifier)? { $ret = factory.derivedTable(alias, s); }
     ;
 
 join_type returns [JoinTableNode.JoinType ret]

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
index a57250e..98a1108 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/FromCompiler.java
@@ -65,6 +65,7 @@ import org.apache.phoenix.schema.PColumnImpl;
 import org.apache.phoenix.schema.PName;
 import org.apache.phoenix.schema.PNameFactory;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.schema.PTableImpl;
 import org.apache.phoenix.schema.PTableKey;
 import org.apache.phoenix.schema.PTableType;
@@ -72,7 +73,6 @@ import org.apache.phoenix.schema.ProjectedColumn;
 import org.apache.phoenix.schema.SortOrder;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.TableRef;
-import org.apache.phoenix.schema.PTable.IndexType;
 import org.apache.phoenix.util.Closeables;
 import org.apache.phoenix.util.IndexUtil;
 import org.apache.phoenix.util.LogUtil;
@@ -163,6 +163,8 @@ public class FromCompiler {
     public static ColumnResolver getResolverForQuery(SelectStatement statement, PhoenixConnection connection)
     		throws SQLException {
     	TableNode fromNode = statement.getFrom();
+    	if (fromNode == null)
+    	    return EMPTY_TABLE_RESOLVER;
         if (fromNode instanceof NamedTableNode)
             return new SingleTableColumnResolver(connection, (NamedTableNode) fromNode, true, 1);
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
index 2276f4e..f8177e6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/QueryCompiler.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.compile;
 
 import java.sql.SQLException;
 import java.sql.SQLFeatureNotSupportedException;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
@@ -40,6 +41,7 @@ import org.apache.phoenix.execute.ScanPlan;
 import org.apache.phoenix.execute.SortMergeJoinPlan;
 import org.apache.phoenix.execute.TupleProjectionPlan;
 import org.apache.phoenix.execute.TupleProjector;
+import org.apache.phoenix.execute.UnionPlan;
 import org.apache.phoenix.expression.Expression;
 import org.apache.phoenix.expression.LiteralExpression;
 import org.apache.phoenix.expression.RowValueConstructorExpression;
@@ -65,6 +67,7 @@ import org.apache.phoenix.schema.AmbiguousColumnException;
 import org.apache.phoenix.schema.ColumnNotFoundException;
 import org.apache.phoenix.schema.PDatum;
 import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
@@ -72,7 +75,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
 
-
 /**
  *
  * Class used to build an executable query plan
@@ -109,10 +111,6 @@ public class QueryCompiler {
         this(statement, select, resolver, Collections.<PDatum>emptyList(), null, new SequenceManager(statement), projectTuples);
     }
 
-    public QueryCompiler(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory, SequenceManager sequenceManager) throws SQLException {
-        this(statement, select, resolver, targetColumns, parallelIteratorFactory, sequenceManager, true);
-    }
-    
     public QueryCompiler(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory, SequenceManager sequenceManager, boolean projectTuples) throws SQLException {
         this.statement = statement;
         this.select = select;
@@ -135,6 +133,10 @@ public class QueryCompiler {
         this.originalScan = ScanUtil.newScan(scan);
     }
 
+    public QueryCompiler(PhoenixStatement statement, SelectStatement select, ColumnResolver resolver, List<? extends PDatum> targetColumns, ParallelIteratorFactory parallelIteratorFactory, SequenceManager sequenceManager) throws SQLException {
+        this(statement, select, resolver, targetColumns, parallelIteratorFactory, sequenceManager, true);
+    }
+
     /**
      * Builds an executable query plan from a parsed SQL statement
      * @return executable query plan
@@ -146,7 +148,42 @@ public class QueryCompiler {
      * @throws AmbiguousColumnException if an unaliased column name is ambiguous across multiple tables
      */
     public QueryPlan compile() throws SQLException{
-        SelectStatement select = this.select;
+        QueryPlan plan;
+        if (select.isUnion()) {
+            plan = compileUnionAll(select);
+        } else {
+            plan = compileSelect(select);
+        }
+        return plan;
+    }
+
+    public QueryPlan compileUnionAll(SelectStatement select) throws SQLException { 
+        List<SelectStatement> unionAllSelects = select.getSelects();
+        List<QueryPlan> plans = new ArrayList<QueryPlan>();
+
+        for (int i=0; i < unionAllSelects.size(); i++ ) {
+            SelectStatement subSelect = unionAllSelects.get(i);
+            // Push down order-by and limit into sub-selects.
+            if (!select.getOrderBy().isEmpty() || select.getLimit() != null) {
+                subSelect = NODE_FACTORY.select(subSelect, select.getOrderBy(), select.getLimit());
+            }
+            QueryPlan subPlan = compileSubquery(subSelect, true);
+            TupleProjector projector = new TupleProjector(subPlan.getProjector());
+            subPlan = new TupleProjectionPlan(subPlan, projector, null);
+            plans.add(subPlan);
+        }
+        UnionCompiler.checkProjectionNumAndTypes(plans);
+
+        TableRef tableRef = UnionCompiler.contructSchemaTable(statement, plans.get(0));
+        ColumnResolver resolver = FromCompiler.getResolver(tableRef);
+        StatementContext context = new StatementContext(statement, resolver, scan, sequenceManager);
+
+        QueryPlan plan = compileSingleFlatQuery(context, select, statement.getParameters(), false, false, null, null, false);
+        plan =  new UnionPlan(context, select, tableRef, plan.getProjector(), plan.getLimit(), plan.getOrderBy(), GroupBy.EMPTY_GROUP_BY, plans, null); 
+        return plan;
+    }
+
+    public QueryPlan compileSelect(SelectStatement select) throws SQLException{
         List<Object> binds = statement.getParameters();
         StatementContext context = new StatementContext(statement, resolver, scan, sequenceManager);
         if (select.isJoin()) {
@@ -161,7 +198,7 @@ public class QueryCompiler {
             return compileSingleQuery(context, select, binds, false, true);
         }
     }
-    
+
     /*
      * Call compileJoinQuery() for join queries recursively down to the leaf JoinTable nodes.
      * This matches the input JoinTable node against patterns in the following order:
@@ -207,7 +244,7 @@ public class QueryCompiler {
                 table.projectColumns(context.getScan());
                 return compileSingleQuery(context, subquery, binds, asSubquery, !asSubquery);
             }
-            QueryPlan plan = compileSubquery(subquery);
+            QueryPlan plan = compileSubquery(subquery, false);
             PTable projectedTable = table.createProjectedTable(plan.getProjector());
             context.setResolver(FromCompiler.getResolverForProjectedTable(projectedTable));
             return new TupleProjectionPlan(plan, new TupleProjector(plan.getProjector()), table.compilePostFilterExpression(context));
@@ -229,7 +266,7 @@ public class QueryCompiler {
                 tupleProjector = new TupleProjector(initialProjectedTable);
             } else {
                 SelectStatement subquery = table.getAsSubquery(orderBy);
-                QueryPlan plan = compileSubquery(subquery);
+                QueryPlan plan = compileSubquery(subquery, false);
                 initialProjectedTable = table.createProjectedTable(plan.getProjector());
                 tableRef = plan.getTableRef();
                 context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
@@ -309,7 +346,7 @@ public class QueryCompiler {
                 tupleProjector = new TupleProjector(rhsProjTable);
             } else {
                 SelectStatement subquery = rhsTable.getAsSubquery(orderBy);
-                QueryPlan plan = compileSubquery(subquery);
+                QueryPlan plan = compileSubquery(subquery, false);
                 rhsProjTable = rhsTable.createProjectedTable(plan.getProjector());
                 rhsTableRef = plan.getTableRef();
                 context.getScan().setFamilyMap(plan.getContext().getScan().getFamilyMap());
@@ -425,7 +462,7 @@ public class QueryCompiler {
         return type == JoinType.Semi && complete;
     }
 
-    protected QueryPlan compileSubquery(SelectStatement subquery) throws SQLException {
+    protected QueryPlan compileSubquery(SelectStatement subquery, boolean pushDownMaxRows) throws SQLException {
         PhoenixConnection connection = this.statement.getConnection();
         subquery = SubselectRewriter.flatten(subquery, connection);
         ColumnResolver resolver = FromCompiler.getResolverForQuery(subquery, connection);
@@ -436,7 +473,7 @@ public class QueryCompiler {
             subquery = StatementNormalizer.normalize(transformedSubquery, resolver);
         }
         int maxRows = this.statement.getMaxRows();
-        this.statement.setMaxRows(0); // overwrite maxRows to avoid its impact on inner queries.
+        this.statement.setMaxRows(pushDownMaxRows ? maxRows : 0); // either push maxRows down into the inner query, or reset it to 0 so it has no impact there.
         QueryPlan plan = new QueryCompiler(this.statement, subquery, resolver, false).compile();
         plan = statement.getConnection().getQueryServices().getOptimizer().optimize(statement, plan);
         this.statement.setMaxRows(maxRows); // restore maxRows.
@@ -449,7 +486,7 @@ public class QueryCompiler {
             return compileSingleFlatQuery(context, select, binds, asSubquery, allowPageFilter, null, null, true);
         }
 
-        QueryPlan innerPlan = compileSubquery(innerSelect);
+        QueryPlan innerPlan = compileSubquery(innerSelect, false);
         TupleProjector tupleProjector = new TupleProjector(innerPlan.getProjector());
         innerPlan = new TupleProjectionPlan(innerPlan, tupleProjector, null);
 
@@ -526,7 +563,7 @@ public class QueryCompiler {
             int i = 0;
             for (SubqueryParseNode subqueryNode : subqueries) {
                 SelectStatement stmt = subqueryNode.getSelectNode();
-                subPlans[i++] = new WhereClauseSubPlan(compileSubquery(stmt), stmt, subqueryNode.expectSingleRow());
+                subPlans[i++] = new WhereClauseSubPlan(compileSubquery(stmt, false), stmt, subqueryNode.expectSingleRow());
             }
             plan = HashJoinPlan.create(select, plan, null, subPlans);
         }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
index f6a6f7a..b9897b1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/StatementNormalizer.java
@@ -99,7 +99,7 @@ public class StatementNormalizer extends ParseNodeRewriter {
             if (selectNodes != normSelectNodes) {
                 statement = NODE_FACTORY.select(statement.getFrom(), statement.getHint(), statement.isDistinct(),
                         normSelectNodes, statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(),
-                        statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+                        statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects());
             }
         }
         

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
index 805894f..6862802 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/SubselectRewriter.java
@@ -70,6 +70,8 @@ public class SubselectRewriter extends ParseNodeRewriter {
         while (from != null && from instanceof DerivedTableNode) {
             DerivedTableNode derivedTable = (DerivedTableNode) from;
             SelectStatement subselect = derivedTable.getSelect();
+            if (subselect.isUnion())
+                break;
             ColumnResolver resolver = FromCompiler.getResolverForQuery(subselect, connection);
             SubselectRewriter rewriter = new SubselectRewriter(resolver, subselect.getSelect(), derivedTable.getAlias());
             SelectStatement ret = rewriter.flatten(select, subselect);
@@ -202,7 +204,8 @@ public class SubselectRewriter extends ParseNodeRewriter {
             isAggregateRewrite = true;
         }
         
-        return NODE_FACTORY.select(subselect.getFrom(), hintRewrite, isDistinctRewrite, selectNodesRewrite, whereRewrite, groupByRewrite, havingRewrite, orderByRewrite, limitRewrite, select.getBindCount(), isAggregateRewrite, select.hasSequence());
+        return NODE_FACTORY.select(subselect.getFrom(), hintRewrite, isDistinctRewrite, selectNodesRewrite, whereRewrite, groupByRewrite, 
+            havingRewrite, orderByRewrite, limitRewrite, select.getBindCount(), isAggregateRewrite, select.hasSequence(), select.getSelects());
     }
     
     private SelectStatement applyPostFilters(SelectStatement statement, List<ParseNode> postFilters) throws SQLException {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
new file mode 100644
index 0000000..3f069ff
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/compile/UnionCompiler.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.compile;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.phoenix.exception.SQLExceptionCode;
+import org.apache.phoenix.exception.SQLExceptionInfo;
+import org.apache.phoenix.expression.Expression;
+import org.apache.phoenix.jdbc.PhoenixStatement;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PColumnImpl;
+import org.apache.phoenix.schema.PName;
+import org.apache.phoenix.schema.PNameFactory;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableImpl;
+import org.apache.phoenix.schema.PTableType;
+import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.schema.types.PDataType;
+
+public class UnionCompiler {
+    private static final PName UNION_FAMILY_NAME = PNameFactory.newName("unionFamilyName");
+    private static final PName UNION_SCHEMA_NAME = PNameFactory.newName("unionSchemaName");
+    private static final PName UNION_TABLE_NAME = PNameFactory.newName("unionTableName");
+
+    public static List<QueryPlan> checkProjectionNumAndTypes(List<QueryPlan> selectPlans) throws SQLException {
+        QueryPlan plan = selectPlans.get(0);
+        int columnCount = plan.getProjector().getColumnCount();
+        List<? extends ColumnProjector> projectors = plan.getProjector().getColumnProjectors();
+        List<PDataType> selectTypes = new ArrayList<PDataType>();
+        for (ColumnProjector pro : projectors) {
+            selectTypes.add(pro.getExpression().getDataType());
+        }
+
+        for (int i = 1;  i < selectPlans.size(); i++) {     
+            plan = selectPlans.get(i);
+            if (columnCount !=plan.getProjector().getColumnCount()) {
+                throw new SQLExceptionInfo.Builder(SQLExceptionCode.SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS).setMessage(".").build().buildException();
+            }
+            List<? extends ColumnProjector> pros =  plan.getProjector().getColumnProjectors();
+            for (int j = 0; j < columnCount; j++) {
+                PDataType type = pros.get(j).getExpression().getDataType();
+                if (!type.isCoercibleTo(selectTypes.get(j))) {
+                    throw new SQLExceptionInfo.Builder(SQLExceptionCode.SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS).setMessage(".").build().buildException();
+                }
+            }
+        }
+        return selectPlans;
+    }
+
+    public static TableRef contructSchemaTable(PhoenixStatement statement, QueryPlan plan) throws SQLException {
+        List<PColumn> projectedColumns = new ArrayList<PColumn>();
+        for (int i=0; i< plan.getProjector().getColumnCount(); i++) {
+            ColumnProjector colProj = plan.getProjector().getColumnProjector(i);
+            Expression sourceExpression = colProj.getExpression();
+            PColumnImpl projectedColumn = new PColumnImpl(PNameFactory.newName(colProj.getName()), UNION_FAMILY_NAME,
+                    sourceExpression.getDataType(), sourceExpression.getMaxLength(), sourceExpression.getScale(), sourceExpression.isNullable(),
+                    i, sourceExpression.getSortOrder(), 500, null, false, sourceExpression.toString());
+            projectedColumns.add(projectedColumn);
+        }
+        Long scn = statement.getConnection().getSCN();
+        PTable tempTable = PTableImpl.makePTable(statement.getConnection().getTenantId(), UNION_SCHEMA_NAME, UNION_TABLE_NAME, 
+                PTableType.SUBQUERY, null, HConstants.LATEST_TIMESTAMP, scn == null ? HConstants.LATEST_TIMESTAMP : scn, null, null, projectedColumns, null, null, null,
+                        true, null, null, null, true, true, true, null, null, null);
+        TableRef tableRef = new TableRef(null, tempTable, 0, false);
+        return tableRef;
+    }
+}

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
index 2eea53b..9c38348 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/exception/SQLExceptionCode.java
@@ -160,6 +160,12 @@ public enum SQLExceptionCode {
      STATELESS_EXPRESSION_NOT_ALLOWED_IN_INDEX(522, "42899", "Stateless expression not allowed in an index"),
 
      /** 
+      * Union All related errors
+      */
+     SELECT_COLUMN_NUM_IN_UNIONALL_DIFFS(525, "42902", "SELECT column number differs in a Union All query is not allowed"),
+     SELECT_COLUMN_TYPE_IN_UNIONALL_DIFFS(526, "42903", "SELECT column types differ in a Union All query is not allowed"),
+
+     /** 
      * HBase and Phoenix specific implementation defined sub-classes.
      * Column family related exceptions.
      * 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/execute/AggregatePlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/AggregatePlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/AggregatePlan.java
index 617cc48..4f344b6 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/execute/AggregatePlan.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/AggregatePlan.java
@@ -177,6 +177,7 @@ public class AggregatePlan extends BaseQueryPlan {
         }
         ParallelIterators parallelIterators = new ParallelIterators(this, null, wrapParallelIteratorFactory());
         splits = parallelIterators.getSplits();
+        scans = parallelIterators.getScans();
 
         AggregatingResultIterator aggResultIterator;
         // No need to merge sort for ungrouped aggregation

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/execute/UnionPlan.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/execute/UnionPlan.java b/phoenix-core/src/main/java/org/apache/phoenix/execute/UnionPlan.java
new file mode 100644
index 0000000..973f37e
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/execute/UnionPlan.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.execute;
+
+import java.sql.ParameterMetaData;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.phoenix.compile.ExplainPlan;
+import org.apache.phoenix.compile.GroupByCompiler.GroupBy;
+import org.apache.phoenix.compile.OrderByCompiler.OrderBy;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.compile.RowProjector;
+import org.apache.phoenix.compile.ScanRanges;
+import org.apache.phoenix.compile.StatementContext;
+import org.apache.phoenix.iterate.ConcatResultIterator;
+import org.apache.phoenix.iterate.LimitingResultIterator;
+import org.apache.phoenix.iterate.MergeSortTopNResultIterator;
+import org.apache.phoenix.iterate.ResultIterator;
+import org.apache.phoenix.iterate.UnionResultIterators;
+import org.apache.phoenix.parse.FilterableStatement;
+import org.apache.phoenix.query.KeyRange;
+import org.apache.phoenix.schema.TableRef;
+import org.apache.phoenix.util.SQLCloseable;
+
+
+public class UnionPlan implements QueryPlan {
+    private static final long DEFAULT_ESTIMATED_SIZE = 10 * 1024; // 10 K
+
+    private final TableRef tableRef;
+    private final FilterableStatement statement;
+    private final ParameterMetaData paramMetaData;
+    private final OrderBy orderBy;
+    private final StatementContext context;
+    private final Integer limit;
+    private final GroupBy groupBy;
+    private final RowProjector projector;
+    private final boolean isDegenerate;
+    private final List<QueryPlan> plans;
+    private UnionResultIterators iterators;
+
+    public UnionPlan(StatementContext context, FilterableStatement statement, TableRef table, RowProjector projector,
+            Integer limit, OrderBy orderBy, GroupBy groupBy, List<QueryPlan> plans, ParameterMetaData paramMetaData) throws SQLException {
+        this.context = context;
+        this.statement = statement;
+        this.tableRef = table;
+        this.projector = projector;
+        this.limit = limit;
+        this.orderBy = orderBy;
+        this.groupBy = groupBy;
+        this.plans = plans;
+        this.paramMetaData = paramMetaData;
+        boolean isDegen = true;
+        for (QueryPlan plan : plans) {           
+            if (plan.getContext().getScanRanges() != ScanRanges.NOTHING) {
+                isDegen = false;
+                break;
+            } 
+        }
+        this.isDegenerate = isDegen;     
+    }
+
+    @Override
+    public boolean isDegenerate() {
+        return isDegenerate;
+    }
+
+    @Override
+    public List<KeyRange> getSplits() {
+        if (iterators == null)
+            return null;
+        return iterators.getSplits();
+    }
+
+    @Override
+    public List<List<Scan>> getScans() {
+        if (iterators == null)
+            return null;
+        return iterators.getScans();
+    }
+
+    @Override
+    public GroupBy getGroupBy() {
+        return groupBy;
+    }
+
+    @Override
+    public OrderBy getOrderBy() {
+        return orderBy;
+    }
+
+    @Override
+    public TableRef getTableRef() {
+        return tableRef;
+    }
+
+    @Override
+    public Integer getLimit() {
+        return limit;
+    }
+
+    @Override
+    public RowProjector getProjector() {
+        return projector;
+    }
+
+    @Override
+    public final ResultIterator iterator() throws SQLException {
+        return iterator(Collections.<SQLCloseable>emptyList());
+    }
+
+    public final ResultIterator iterator(final List<? extends SQLCloseable> dependencies) throws SQLException {
+        this.iterators = new UnionResultIterators(plans);
+        ResultIterator scanner;      
+        boolean isOrdered = !orderBy.getOrderByExpressions().isEmpty();
+
+        if (isOrdered) { // TopN
+            scanner = new MergeSortTopNResultIterator(iterators, limit, orderBy.getOrderByExpressions());
+        } else {
+            scanner = new ConcatResultIterator(iterators);
+            if (limit != null) {
+                scanner = new LimitingResultIterator(scanner, limit);
+            }          
+        }
+        return scanner;
+    }
+
+    @Override
+    public ExplainPlan getExplainPlan() throws SQLException {
+        List<String> steps = new ArrayList<String>();
+        steps.add("UNION ALL OVER " + this.plans.size() + " QUERIES");
+        ResultIterator iterator = iterator();
+        iterator.explain(steps);
+        // Indent plan steps nested under the union, except the last client-side merge/concat step (if there is one)
+        int offset = !orderBy.getOrderByExpressions().isEmpty() || limit != null ? 1 : 0;
+        for (int i = 1 ; i < steps.size()-offset; i++) {
+            steps.set(i, "    " + steps.get(i));
+        }
+        return new ExplainPlan(steps);
+    }
+
+
+    @Override
+    public long getEstimatedSize() {
+        return DEFAULT_ESTIMATED_SIZE;
+    }
+
+    @Override
+    public ParameterMetaData getParameterMetaData() {
+        return paramMetaData;
+    }
+
+    @Override
+    public FilterableStatement getStatement() {
+        return statement;
+    }
+
+    @Override
+    public StatementContext getContext() {
+        return context;
+    }
+
+    @Override
+    public boolean isRowKeyOrdered() {
+        return groupBy.isEmpty() ? orderBy.getOrderByExpressions().isEmpty() : groupBy.isOrderPreserving();
+    }
+
+    public List<QueryPlan> getPlans() {
+        return this.plans;
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/iterate/MergeSortTopNResultIterator.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/MergeSortTopNResultIterator.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/MergeSortTopNResultIterator.java
index 71259e0..87a6a62 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/iterate/MergeSortTopNResultIterator.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/MergeSortTopNResultIterator.java
@@ -37,15 +37,22 @@ import org.apache.phoenix.schema.tuple.Tuple;
 public class MergeSortTopNResultIterator extends MergeSortResultIterator {
 
     private final int limit;
+    private final boolean clientSideOnly;
     private int count = 0;
     private final List<OrderByExpression> orderByColumns;
     private final ImmutableBytesWritable ptr1 = new ImmutableBytesWritable();
     private final ImmutableBytesWritable ptr2 = new ImmutableBytesWritable();
     
-    public MergeSortTopNResultIterator(ResultIterators iterators, Integer limit, List<OrderByExpression> orderByColumns) {
+    public MergeSortTopNResultIterator(ResultIterators iterators, Integer limit,
+            List<OrderByExpression> orderByColumns, boolean clientSideOnly) {
         super(iterators);
         this.limit = limit == null ? -1 : limit;
         this.orderByColumns = orderByColumns;
+        this.clientSideOnly = clientSideOnly;
+    }
+
+    public MergeSortTopNResultIterator(ResultIterators iterators, Integer limit, List<OrderByExpression> orderByColumns) {
+        this(iterators, limit, orderByColumns, false);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/iterate/UnionResultIterators.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/iterate/UnionResultIterators.java b/phoenix-core/src/main/java/org/apache/phoenix/iterate/UnionResultIterators.java
new file mode 100644
index 0000000..b7c8b21
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/phoenix/iterate/UnionResultIterators.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.iterate;
+
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.query.KeyRange;
+import org.apache.phoenix.util.ServerUtil;
+
+import com.google.common.collect.Lists;
+
+
+/**
+ *
+ * Create a union ResultIterators
+ *
+ * 
+ */
+public class UnionResultIterators implements ResultIterators {
+    private final List<KeyRange> splits;
+    private final List<List<Scan>> scans;
+    private final List<PeekingResultIterator> iterators;
+    private final List<QueryPlan> plans;
+
+    public UnionResultIterators(List<QueryPlan> plans) throws SQLException {
+        this.plans = plans;
+        int nPlans = plans.size();
+        iterators = Lists.newArrayListWithExpectedSize(nPlans);
+        splits = Lists.newArrayListWithExpectedSize(nPlans * 30); 
+        scans = Lists.newArrayListWithExpectedSize(nPlans * 10); 
+        for (QueryPlan plan : this.plans) {
+            iterators.add(LookAheadResultIterator.wrap(plan.iterator()));
+            splits.addAll(plan.getSplits()); 
+            scans.addAll(plan.getScans());
+        }
+    }
+
+    @Override
+    public List<KeyRange> getSplits() {
+        return splits;
+    }
+
+    @Override
+    public void close() throws SQLException {   
+        SQLException toThrow = null;
+        try {
+            if (iterators != null) {
+                for (int index=0; index < iterators.size(); index++) {
+                    PeekingResultIterator iterator = iterators.get(index);
+                    try {
+                        iterator.close();
+                    } catch (Exception e) {
+                        if (toThrow == null) {
+                            toThrow = ServerUtil.parseServerException(e);
+                        } else {
+                            toThrow.setNextException(ServerUtil.parseServerException(e));
+                        }
+                    }
+                }
+            }
+        } catch (Exception e) {
+            toThrow = ServerUtil.parseServerException(e);
+        } finally {
+            if (toThrow != null) {
+                throw toThrow;
+            }
+        }
+    }
+
+    @Override
+    public List<List<Scan>> getScans() {
+        return scans;
+    }
+
+    @Override
+    public int size() {
+        return scans.size();
+    }
+
+    @Override
+    public void explain(List<String> planSteps) {
+        for (int index=0; index < iterators.size(); index++) {
+            iterators.get(index).explain(planSteps);
+        }
+    }
+
+    @Override 
+    public List<PeekingResultIterator> getIterators() throws SQLException {    
+        return iterators;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
index ee6b016..462e1f0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixStatement.java
@@ -330,9 +330,15 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
     private static class ExecutableSelectStatement extends SelectStatement implements CompilableStatement {
         private ExecutableSelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
                 List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
-            super(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence);
+            this(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence, Collections.<SelectStatement>emptyList());
         }
 
+        private ExecutableSelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
+                List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate,
+                boolean hasSequence, List<SelectStatement> selects) {
+            super(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence, selects);
+        }
+        
         @SuppressWarnings("unchecked")
         @Override
         public QueryPlan compilePlan(PhoenixStatement stmt, Sequence.ValueOp seqAction) throws SQLException {
@@ -486,7 +492,6 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
                 public boolean isRowKeyOrdered() {
                     return true;
                 }
-                
             };
         }
     }
@@ -894,12 +899,20 @@ public class PhoenixStatement implements Statement, SQLCloseable, org.apache.pho
     protected static class ExecutableNodeFactory extends ParseNodeFactory {
         @Override
         public ExecutableSelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
-                                                ParseNode where, List<ParseNode> groupBy, ParseNode having,
-                                                List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
+                ParseNode where, List<ParseNode> groupBy, ParseNode having,
+                List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
+            return this.select(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence, 
+                Collections.<SelectStatement>emptyList());
+        }
+
+        @Override
+        public ExecutableSelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
+                List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate,
+                boolean hasSequence, List<SelectStatement> selects) {
             return new ExecutableSelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy,
-                    having, orderBy == null ? Collections.<OrderByNode>emptyList() : orderBy, limit, bindCount, isAggregate, hasSequence);
+                    having, orderBy == null ? Collections.<OrderByNode>emptyList() : orderBy, limit, bindCount, isAggregate, hasSequence, selects);
         }
-        
+
         @Override
         public ExecutableUpsertStatement upsert(NamedTableNode table, HintNode hintNode, List<ColumnName> columns, List<ParseNode> values, SelectStatement select, int bindCount) {
             return new ExecutableUpsertStatement(table, hintNode, columns, values, select, bindCount);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
index 62db00a..5aba933 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeFactory.java
@@ -19,6 +19,7 @@ package org.apache.phoenix.parse;
 
 import java.lang.reflect.Constructor;
 import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -611,6 +612,13 @@ public class ParseNodeFactory {
         return new OrderByNode(expression, nullsLast, orderAscending);
     }
 
+    public SelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
+            List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, 
+            boolean hasSequence, List<SelectStatement> selects) {
+
+        return new SelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy, having,
+                orderBy == null ? Collections.<OrderByNode>emptyList() : orderBy, limit, bindCount, isAggregate, hasSequence, selects);
+    } 
 
     public SelectStatement select(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select, ParseNode where,
             List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate, boolean hasSequence) {
@@ -618,7 +626,7 @@ public class ParseNodeFactory {
         return new SelectStatement(from, hint, isDistinct, select, where, groupBy == null ? Collections.<ParseNode>emptyList() : groupBy, having,
                 orderBy == null ? Collections.<OrderByNode>emptyList() : orderBy, limit, bindCount, isAggregate, hasSequence);
     }
-
+    
     public UpsertStatement upsert(NamedTableNode table, HintNode hint, List<ColumnName> columns, List<ParseNode> values, SelectStatement select, int bindCount) {
         return new UpsertStatement(table, hint, columns, values, select, bindCount);
     }
@@ -681,7 +689,7 @@ public class ParseNodeFactory {
     public SelectStatement select(SelectStatement statement, HintNode hint) {
         return hint == null || hint.isEmpty() ? statement : select(statement.getFrom(), hint, statement.isDistinct(), statement.getSelect(),
                 statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), statement.getLimit(),
-                statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+                statement.getBindCount(), statement.isAggregate(), statement.hasSequence(), statement.getSelects());
     }
 
     public SelectStatement select(SelectStatement statement, HintNode hint, ParseNode where) {
@@ -690,13 +698,36 @@ public class ParseNodeFactory {
                 statement.hasSequence());
     }
 
+    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate) {
+        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
+            statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, limit,
+            bindCount, isAggregate || statement.isAggregate(), statement.hasSequence());
+
+    }
+
     public SelectStatement select(SelectStatement statement, LimitNode limit) {
         return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
-                statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), limit,
-                statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+            statement.getWhere(), statement.getGroupBy(), statement.getHaving(), statement.getOrderBy(), limit,
+            statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+    }
+
+    public SelectStatement select(SelectStatement statement, List<OrderByNode> orderBy, LimitNode limit) {
+        return select(statement.getFrom(), statement.getHint(), statement.isDistinct(), statement.getSelect(),
+            statement.getWhere(), statement.getGroupBy(), statement.getHaving(), orderBy, limit,
+            statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+    }
+
+    public SelectStatement select(List<SelectStatement> statements, List<OrderByNode> orderBy, LimitNode limit, int bindCount, boolean isAggregate) {
+        if (statements.size() == 1)
+            return select(statements.get(0), orderBy, limit, bindCount, isAggregate);
+        
+        return select(null, HintNode.EMPTY_HINT_NODE, false, Lists.newArrayList(aliasedNode(null, wildcard())), 
+                null, null, null, orderBy, limit, bindCount, false, false, statements);
     }
 
     public SubqueryParseNode subquery(SelectStatement select, boolean expectSingleRow) {
+        if (select.isUnion()) 
+            throw new RuntimeException(new SQLFeatureNotSupportedException());
         return new SubqueryParseNode(select, expectSingleRow);
     }
 

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
index c6514dc..4ce893d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/ParseNodeRewriter.java
@@ -58,7 +58,7 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
     public static SelectStatement rewrite(SelectStatement statement, ParseNodeRewriter rewriter) throws SQLException {
         Map<String,ParseNode> aliasMap = rewriter.getAliasMap();
         TableNode from = statement.getFrom();
-        TableNode normFrom = from.accept(new TableNodeRewriter(rewriter));
+        TableNode normFrom = from == null ? null : from.accept(new TableNodeRewriter(rewriter));
         ParseNode where = statement.getWhere();
         ParseNode normWhere = where;
         if (where != null) {
@@ -150,7 +150,8 @@ public class ParseNodeRewriter extends TraverseAllParseNodeVisitor<ParseNode> {
         }
         return NODE_FACTORY.select(normFrom, statement.getHint(), statement.isDistinct(),
                 normSelectNodes, normWhere, normGroupByNodes, normHaving, normOrderByNodes,
-                statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence());
+                statement.getLimit(), statement.getBindCount(), statement.isAggregate(), statement.hasSequence(),
+                statement.getSelects());
     }
 
     private Map<String, ParseNode> getAliasMap() {

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
index 71cabd6..08cec87 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/parse/SelectStatement.java
@@ -17,6 +17,7 @@
  */
 package org.apache.phoenix.parse;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -81,6 +82,14 @@ public class SelectStatement implements FilterableStatement {
                 select.getOrderBy(), select.getLimit(), select.getBindCount(), select.isAggregate(), select.hasSequence());
     }
     
+    // Copy constructor for sub select statements in a union
+    public static SelectStatement create(SelectStatement select, 
+            List<OrderByNode> orderBy, LimitNode limit, boolean isAggregate) {
+        return new SelectStatement(select.getFrom(), select.getHint(), select.isDistinct(), 
+                select.getSelect(), select.getWhere(), select.getGroupBy(), select.getHaving(), 
+                orderBy, limit, select.getBindCount(), isAggregate, select.hasSequence());
+    }
+
     private final TableNode fromTable;
     private final HintNode hint;
     private final boolean isDistinct;
@@ -93,6 +102,7 @@ public class SelectStatement implements FilterableStatement {
     private final int bindCount;
     private final boolean isAggregate;
     private final boolean hasSequence;
+    private final List<SelectStatement> selects = new ArrayList<SelectStatement>();
     
     @Override
     public final String toString() {
@@ -205,7 +215,7 @@ public class SelectStatement implements FilterableStatement {
     
     protected SelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
             ParseNode where, List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit,
-            int bindCount, boolean isAggregate, boolean hasSequence) {
+            int bindCount, boolean isAggregate, boolean hasSequence, List<SelectStatement> selects) {
         this.fromTable = from;
         this.hint = hint == null ? HintNode.EMPTY_HINT_NODE : hint;
         this.isDistinct = isDistinct;
@@ -218,6 +228,16 @@ public class SelectStatement implements FilterableStatement {
         this.bindCount = bindCount;
         this.isAggregate = isAggregate || groupBy.size() != countConstants(groupBy) || this.having != null;
         this.hasSequence = hasSequence;
+        if (!selects.isEmpty()) {
+            this.selects.addAll(selects);
+        }
+    }
+
+    public SelectStatement(TableNode from, HintNode hint, boolean isDistinct, List<AliasedNode> select,
+            ParseNode where, List<ParseNode> groupBy, ParseNode having, List<OrderByNode> orderBy, LimitNode limit,
+            int bindCount, boolean isAggregate, boolean hasSequence) {
+        this(from, hint, isDistinct, select, where, groupBy, having, orderBy, limit, bindCount, isAggregate, hasSequence,
+                Collections.<SelectStatement>emptyList());
     }
     
     @Override
@@ -298,4 +318,12 @@ public class SelectStatement implements FilterableStatement {
         
         return ((DerivedTableNode) fromTable).getSelect();
     }
+
+    public boolean isUnion() {
+        return !getSelects().isEmpty();
+    }
+
+    public List<SelectStatement> getSelects() {
+        return selects;
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c50feca2/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java b/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
index 182757f..21a63c7 100644
--- a/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
+++ b/phoenix-core/src/test/java/org/apache/phoenix/parse/QueryParserTest.java
@@ -635,19 +635,6 @@ public class QueryParserTest {
     }
 
     @Test
-    public void testInvalidUpsertSelectHint() throws Exception {
-        String sql = (
-                (
-                        "upsert into t select /*+ NO_INDEX */ k from t where k in ( 1,2 )"));
-        try {
-            parseQuery(sql);
-            fail();
-        } catch (SQLException e) {
-            assertEquals(SQLExceptionCode.PARSER_ERROR.getErrorCode(), e.getErrorCode());
-        }
-    }
-
-    @Test
     public void testTableNameStartsWithUnderscore() throws Exception {
         String sql = (
                 (