Posted to commits@phoenix.apache.org by ja...@apache.org on 2014/01/27 23:16:06 UTC

[50/51] [partial] Initial commit of master branch from github

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/.gitignore
----------------------------------------------------------------------
diff --git a/.gitignore b/.gitignore
index 864c428..21a2c92 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,6 +5,7 @@
 
 # eclipse stuffs
 .settings/*
+*/.settings/
 .classpath
 .project
 

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/bin/csv-bulk-loader.sh
----------------------------------------------------------------------
diff --git a/bin/csv-bulk-loader.sh b/bin/csv-bulk-loader.sh
index 9b05e17..ded249e 100755
--- a/bin/csv-bulk-loader.sh
+++ b/bin/csv-bulk-loader.sh
@@ -36,5 +36,5 @@
 # -error                         Ignore errors while reading rows from the CSV? (1 - YES | 0 - NO, defaults to 1) (optional)
 # -help                          Print all options (optional)
 
-phoenix_client_jar=$(find ../target/phoenix-*-client.jar)
+phoenix_client_jar=$(find ../phoenix-assembly/target/phoenix-*-client.jar)
 java -cp "$phoenix_client_jar" org.apache.phoenix.map.reduce.CSVBulkLoader "$@"
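
With the new module layout, the find glob above resolves to something like
../phoenix-assembly/target/phoenix-3.0.0-SNAPSHOT-client.jar once the assembly module
has been built. A minimal smoke test, using only the -help flag documented above:

	$ cd bin
	$ ./csv-bulk-loader.sh -help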

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/bin/performance.sh
----------------------------------------------------------------------
diff --git a/bin/performance.sh b/bin/performance.sh
index 24928aa..43ebaab 100755
--- a/bin/performance.sh
+++ b/bin/performance.sh
@@ -23,6 +23,7 @@
 
 # Note: This script has been tested only in a Linux environment. It should work on any Unix platform, but that has not been verified.
 
+
 # command line arguments
 zookeeper=$1
 rowcount=$2
@@ -36,9 +37,9 @@ statements=""
 
 # Phoenix client jar. To generate new jars: $ mvn package -DskipTests
 current_dir=$(cd $(dirname $0);pwd)
-phoenix_jar_path="$current_dir/../target"
+phoenix_jar_path="$current_dir/../phoenix-assembly/target"
 phoenix_client_jar=$(find $phoenix_jar_path/phoenix-*-client.jar)
-testjar="$phoenix_jar_path/phoenix-*-tests.jar"
+testjar="$current_dir/../phoenix-core/target/phoenix-*-tests.jar"
 
 # HBase configuration folder path (where hbase-site.xml resides) for HBase/Phoenix client-side property override
 hbase_config_path="$current_dir"
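
Per the argument handling above ($1 is the ZooKeeper quorum, $2 the row count), a
typical run against a local quorum might look like (row count arbitrary):

	$ ./performance.sh localhost 100000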

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/bin/psql.sh
----------------------------------------------------------------------
diff --git a/bin/psql.sh b/bin/psql.sh
index 316171a..c17f7c3 100755
--- a/bin/psql.sh
+++ b/bin/psql.sh
@@ -23,7 +23,7 @@
 
 # Phoenix client jar. To generate new jars: $ mvn package -DskipTests
 current_dir=$(cd $(dirname $0);pwd)
-phoenix_jar_path="$current_dir/../target"
+phoenix_jar_path="$current_dir/../phoenix-assembly/target"
 phoenix_client_jar=$(find $phoenix_jar_path/phoenix-*-client.jar)
 
 # HBase configuration folder path (where hbase-site.xml resides) for HBase/Phoenix client-side property override
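
Only the jar lookup changes here. A sketch of an invocation, assuming psql.sh takes
the ZooKeeper quorum first like its sibling scripts (the SQL file name is hypothetical):

	$ ./psql.sh localhost my-ddl.sql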

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/bin/sqlline.sh
----------------------------------------------------------------------
diff --git a/bin/sqlline.sh b/bin/sqlline.sh
index 6abda20..066f384 100755
--- a/bin/sqlline.sh
+++ b/bin/sqlline.sh
@@ -23,7 +23,7 @@
 
 # Phoenix client jar. To generate new jars: $ mvn package -DskipTests
 current_dir=$(cd $(dirname $0);pwd)
-phoenix_jar_path="$current_dir/../target"
+phoenix_jar_path="$current_dir/../phoenix-assembly/target"
 phoenix_client_jar=$(find $phoenix_jar_path/phoenix-*-client.jar)
 
 
@@ -36,4 +36,4 @@ if [ "$2" ]
   then sqlfile="--run=$2";
 fi
 
-java -cp ".:$phoenix_client_jar" -Dlog4j.configuration=file:$current_dir/log4j.properties sqlline.SqlLine -d org.apache.phoenix.jdbc.PhoenixDriver -u jdbc:phoenix:$1 -n none -p none --color=true --fastConnect=false --silent=true --verbose=false --isolation=TRANSACTION_READ_COMMITTED $sqlfile
+java -cp ".:$phoenix_client_jar" -Dlog4j.configuration=file:$current_dir/log4j.properties sqlline.SqlLine -d org.apache.phoenix.jdbc.PhoenixDriver -u jdbc:phoenix:$1 -n none -p none --color=true --fastConnect=false --verbose=true --isolation=TRANSACTION_READ_COMMITTED $sqlfile
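
From the argument handling above, $1 is the ZooKeeper quorum and the optional $2 is a
SQL file passed through as --run. For example (the file name is hypothetical):

	$ ./sqlline.sh localhost
	$ ./sqlline.sh localhost my-queries.sql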

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/bin/upgradeTo2.sh
----------------------------------------------------------------------
diff --git a/bin/upgradeTo2.sh b/bin/upgradeTo2.sh
index d2b9ec2..c2a4a0d 100755
--- a/bin/upgradeTo2.sh
+++ b/bin/upgradeTo2.sh
@@ -21,10 +21,9 @@
 #
 ############################################################################
 
-
 # Phoenix client jar. To generate new jars: $ mvn package -DskipTests
 current_dir=$(cd $(dirname $0);pwd)
-phoenix_jar_path="$current_dir/../target"
+phoenix_jar_path="$current_dir/../phoenix-assembly/target"
 phoenix_client_jar=$(find $phoenix_jar_path/phoenix-*-client.jar)
 
 # HBase configuration folder path (where hbase-site.xml resides) for HBase/Phoenix client-side property override

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/build.txt
----------------------------------------------------------------------
diff --git a/build.txt b/build.txt
index a28a798..ca3304d 100644
--- a/build.txt
+++ b/build.txt
@@ -1,11 +1,38 @@
-# Building Phoenix
-================
+############################################################################
+#
+# Copyright 2010 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+############################################################################
+
+
+# Building Apache Phoenix
+=========================
 
 Phoenix uses Maven (3.X) to build all its necessary resources. 
 
 ## Building from source
 =======================
 
+On first setup, you may need to run
+	$ mvn install -DskipTests
+to install the jars into your local repository. This is a side effect of multi-module Maven projects.
+
 1. To re-generate the antlr based files:
 	$ mvn process-sources
 
@@ -31,8 +58,10 @@ Use the m2e eclipse plugin and do Import->Maven Project and just pick the root '
 ===========
 Findbugs report is generated in /target/site
 	$ mvn site
+	
 
 ## Generate Apache Web Site
 ===========================
-	$ mvn clean site -Ddependency.locations.enabled=false
-	
+	$ mvn -pl phoenix-core site -Ddependency.locations.enabled=false
+
+Note: site is generated in phoenix-core/target/site
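
In line with the bin/ script changes in this commit, packaged artifacts now land under
phoenix-assembly/target rather than the top-level target/ directory:

	$ mvn package -DskipTests
	$ ls phoenix-assembly/target/phoenix-*-client.jar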

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/config/apache-access-logs.properties
----------------------------------------------------------------------
diff --git a/config/apache-access-logs.properties b/config/apache-access-logs.properties
new file mode 100644
index 0000000..2733cba
--- /dev/null
+++ b/config/apache-access-logs.properties
@@ -0,0 +1,35 @@
+###########################
+# Configuration for Flume
+##########################
+# TODO: Should be moved into the flume folder, but it's a little bit of a pain in the assembly configuration
+# for now, just leaving it here - jyates
+#
+# configuration for agent
+agent.sources=spooling-source
+agent.sinks=phoenix-sink
+agent.channels=memoryChannel
+# configuration for channel
+agent.channels.memoryChannel.type=memory
+agent.channels.memoryChannel.transactionCapacity=100
+agent.channels.memoryChannel.byteCapacityBufferPercentage=20
+# configuration for source
+agent.sources.spooling-source.type=spooldir
+agent.sources.spooling-source.channels=memoryChannel
+agent.sources.spooling-source.spoolDir=/opt/logs
+# configuration for interceptor
+agent.sources.spooling-source.interceptors=i1
+agent.sources.spooling-source.interceptors.i1.type=host
+agent.sources.spooling-source.interceptors.i1.hostHeader=f_host
+# configuration for sink
+agent.sinks.phoenix-sink.type=org.apache.phoenix.flume.sink.PhoenixSink
+agent.sinks.phoenix-sink.channel=memoryChannel
+agent.sinks.phoenix-sink.batchSize=100
+agent.sinks.phoenix-sink.table=APACHE_LOGS
+agent.sinks.phoenix-sink.ddl=CREATE TABLE IF NOT EXISTS APACHE_LOGS(uid VARCHAR NOT NULL, host VARCHAR, identity VARCHAR, user VARCHAR, time VARCHAR, method VARCHAR, request VARCHAR, protocol VARCHAR, status INTEGER, size INTEGER, referer VARCHAR, agent VARCHAR, f_host VARCHAR CONSTRAINT pk PRIMARY KEY(uid))
+agent.sinks.phoenix-sink.zookeeperQuorum=localhost
+agent.sinks.phoenix-sink.serializer=REGEX
+agent.sinks.phoenix-sink.serializer.rowkeyType=uuid
+agent.sinks.phoenix-sink.serializer.regex=([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) \"([^ ]+) ([^ ]+) ([^\"]+)\" (-|[0-9]*) (-|[0-9]*) (?:([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?
+agent.sinks.phoenix-sink.serializer.columns=host,identity,user,time,method,request,protocol,status,size,referer,agent
+agent.sinks.phoenix-sink.serializer.headers=f_host
+
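
To exercise the sink, point a Flume agent at this file; note that the agent name must
match the "agent" prefix used throughout the properties. A sketch, assuming flume-ng
is on the PATH and run from the repo root:

	$ flume-ng agent --conf ./conf --conf-file config/apache-access-logs.properties --name agent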

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/config/csv-bulk-load-config.properties
----------------------------------------------------------------------
diff --git a/config/csv-bulk-load-config.properties b/config/csv-bulk-load-config.properties
new file mode 100644
index 0000000..2d81808
--- /dev/null
+++ b/config/csv-bulk-load-config.properties
@@ -0,0 +1,5 @@
+mapreduce.map.output.compress=true
+mapreduce.map.output.compress.codec=org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.CompressionCodec
+io.sort.record.percent=0.2
+io.sort.factor=20
+mapred.tasktracker.map.tasks.maximum=10
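
These are standard Hadoop/MapReduce tuning properties (map-output compression, sort
tuning, per-tasktracker map slots) rather than Phoenix-specific settings. Equivalently,
for a job whose driver honors Hadoop's GenericOptionsParser, any of them could be set
ad hoc (the job jar and class names here are hypothetical):

	$ hadoop jar my-job.jar com.example.MyJob -Dmapreduce.map.output.compress=true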

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/dev/eclipse_prefs_phoenix.epf
----------------------------------------------------------------------
diff --git a/dev/eclipse_prefs_phoenix.epf b/dev/eclipse_prefs_phoenix.epf
index dd9e8f8..fb8df40 100644
--- a/dev/eclipse_prefs_phoenix.epf
+++ b/dev/eclipse_prefs_phoenix.epf
@@ -800,7 +800,7 @@ file_export_version=3.0
 /instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.packages.linktoeditor=true
 /instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.staticondemandthreshold=99
 /instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.text.code_templates_migrated=true
-/instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8"?><templates><template autoinsert\="true" context\="gettercomment_context" deleted\="false" description\="Comment for getter method" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.gettercomment" name\="gettercomment">/**\n * @return Returns the ${bare_field_name}.\n */</template><template autoinsert\="true" context\="settercomment_context" deleted\="false" description\="Comment for setter method" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.settercomment" name\="settercomment">/**\n * @param ${param} The ${bare_field_name} to set.\n */</template><template autoinsert\="true" context\="typecomment_context" deleted\="false" description\="Comment for created types" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.typecomment" name\="typecomment">/**\n * Describe your class here.\n *\n * @author ${user}\n * @since 138\n */</template><template autoinsert\="true" context\="fieldcomment_context" deleted\="false" description\="Comment for fields" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.fieldcomment" name\="fieldcomment">/**\n * Comment for &lt;code&gt;${field}&lt;/code&gt;\n */</template></templates>
+/instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates><template autoinsert\="true" context\="gettercomment_context" deleted\="false" description\="Comment for getter method" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.gettercomment" name\="gettercomment">/**\n * @return Returns the ${bare_field_name}.\n */</template><template autoinsert\="true" context\="settercomment_context" deleted\="false" description\="Comment for setter method" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.settercomment" name\="settercomment">/**\n * @param ${param} The ${bare_field_name} to set.\n */</template><template autoinsert\="true" context\="typecomment_context" deleted\="false" description\="Comment for created types" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.typecomment" name\="typecomment">/**\n * Describe your class here.\n *\n * @author ${user}\n * @since 138\n */</template><template autoinsert\="true" context\="fieldcomment_context" deleted\="false" description\="Comment for fields" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.fieldcomment" name\="fieldcomment">/**\n * Comment for &lt;code&gt;${field}&lt;/code&gt;\n */</template><template autoinsert\="false" context\="newtype_context" deleted\="false" description\="Newly created files" enabled\="true" id\="org.eclipse.jdt.ui.text.codetemplates.newtype" name\="newtype">/*\n * Copyright 2010 The Apache Software Foundation\n *\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements.  See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership.  The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * "License"); you may not use this file except in compliance\n * with the License.  You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an "AS IS" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n${package_declaration}\n\n${typecomment}\n${type_declaration}</template></templates>
 /instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.text.custom_templates=<?xml version\="1.0" encoding\="UTF-8"?><templates/>
 /instance/org.eclipse.jdt.ui/org.eclipse.jdt.ui.text.templates_migrated=true
 /instance/org.eclipse.jdt.ui/proposalOrderMigrated=true

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/pom.xml b/phoenix-assembly/pom.xml
new file mode 100644
index 0000000..9a89206
--- /dev/null
+++ b/phoenix-assembly/pom.xml
@@ -0,0 +1,115 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache</groupId>
+    <artifactId>phoenix</artifactId>
+    <version>3.0.0-SNAPSHOT</version>
+  </parent>
+  <artifactId>phoenix-assembly</artifactId>
+  <name>Phoenix Assembly</name>
+  <description>Assemble Phoenix artifacts</description>
+  <packaging>pom</packaging>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>client</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <configuration>
+              <attach>false</attach>
+              <finalName>phoenix-${project.version}</finalName>
+              <archive>
+                <index>true</index>
+                <manifest>
+                  <addClasspath>true</addClasspath>
+                  <mainClass>org.apache.phoenix.util.PhoenixRuntime</mainClass>
+                  <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+                  <addDefaultSpecificationEntries>true</addDefaultSpecificationEntries>
+                </manifest>
+              </archive>
+              <descriptors>
+                <descriptor>src/build/client.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+          <execution>
+            <id>package-to-tar</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <configuration>
+              <finalName>phoenix-${project.version}</finalName>
+              <attach>false</attach>
+              <tarLongFileMode>gnu</tarLongFileMode>
+              <appendAssemblyId>false</appendAssemblyId>
+              <descriptors>
+                <descriptor>src/build/all.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+          <execution>
+            <id>client-minimal</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <configuration>
+              <finalName>phoenix-${project.version}</finalName>
+              <attach>false</attach>
+              <appendAssemblyId>true</appendAssemblyId>
+              <descriptors>
+                <!-- Build the phoenix client jar, but without HBase code. -->
+                <descriptor>src/build/client-without-hbase.xml</descriptor>
+                <!-- Build the phoenix client jar, but without HBase (or its dependencies). -->
+                <descriptor>src/build/client-minimal.xml</descriptor>
+              </descriptors>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- No jars created for this module -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals />
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <!-- Depend on all other internal projects -->
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-core</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>${compat.module}</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-flume</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-pig</artifactId>
+    </dependency>
+  </dependencies>
+</project>
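
Given the three executions above, a package run from the root should leave
phoenix-3.0.0-SNAPSHOT-client.jar, phoenix-3.0.0-SNAPSHOT.tar.gz, and the
client-minimal / client-without-hbase jars under phoenix-assembly/target
(exact names depend on ${project.version}):

	$ mvn package -DskipTests
	$ ls phoenix-assembly/target/phoenix-*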

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/all.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/all.xml b/phoenix-assembly/src/build/all.xml
new file mode 100644
index 0000000..36910d5
--- /dev/null
+++ b/phoenix-assembly/src/build/all.xml
@@ -0,0 +1,139 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <!--This 'all' id is not appended to the produced bundle because we do this: http://maven.apache.org/plugins/maven-assembly-plugin/faq.html#required-classifiers -->
+  <id>all</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>true</includeBaseDirectory>
+  
+  <moduleSets>
+    <moduleSet>
+      <!-- Enable access to all projects in the current multimodule build. Eclipse
+        flags this as an error, but it builds just fine from the command line. -->
+      <useAllReactorProjects>true</useAllReactorProjects>
+       <!-- Include all the sources in the top directory -->
+      <sources>
+         <fileSets>
+          <fileSet>
+            <!-- Make sure these excludes stay the same as the phoenix-hadoop2-compat
+                 excludes below -->
+            <excludes>
+              <exclude>target/</exclude>
+              <exclude>test/</exclude>
+              <exclude>.classpath</exclude>
+              <exclude>.project</exclude>
+              <exclude>.settings/</exclude>
+            </excludes>
+          </fileSet>
+        </fileSets>
+      </sources>
+      <!-- Binaries for the dependencies also go in the lib directory -->
+      <binaries>
+        <outputDirectory>lib</outputDirectory>
+        <unpack>false</unpack>
+      </binaries>
+    </moduleSet>
+  </moduleSets>
+
+  <fileSets>
+    <!--This one is weird.  When we assemble src, it'll be the default profile, which
+         at the moment is hadoop1.  But we should include the hadoop2 compat module
+         too so we can build hadoop2 from src -->
+    <fileSet>
+      <directory>${project.basedir}/..</directory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+      <includes>
+        <include>phoenix-*</include>
+      </includes>
+      <excludes>
+        <exclude>target/</exclude>
+        <exclude>test/</exclude>
+        <exclude>.classpath</exclude>
+        <exclude>.project</exclude>
+        <exclude>.settings/</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <!--Get misc project files -->
+      <directory>${project.basedir}/..</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>*.txt</include>
+        <include>*.md</include>
+        <include>pom.xml</include>
+      </includes>
+    </fileSet>
+    <!-- Top level directories -->
+    <fileSet>
+      <directory>${project.basedir}/../bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/../dev</directory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/../docs</directory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/../examples</directory>
+      <fileMode>0644</fileMode>
+      <directoryMode>0755</directoryMode>
+    </fileSet>
+    <!-- Add the client jar. Expects the client jar packaging phase to already be run, 
+      which is determined by specification order in the pom. -->
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>phoenix-*-client.jar</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+
+  <!-- And add all of our dependencies -->
+  <dependencySets>
+    <dependencySet>
+      <!-- Ship the dependencies as intact jars into the lib directory (unlike the
+        client jar build, nothing needs to be unpacked here) -->
+      <unpack>false</unpack>
+      <outputDirectory>/lib</outputDirectory>
+      <includes>
+        <include>commons-configuration:commons-configuration</include>
+        <include>commons-io:commons-io</include>
+        <include>commons-lang:commons-lang</include>
+        <include>commons-logging:commons-logging</include>
+        <include>com.google.guava:guava</include>
+        <include>org.apache.hadoop:hadoop*</include>
+        <include>com.google.protobuf:protobuf-java</include>
+        <include>org.slf4j:slf4j-api</include>
+        <include>org.slf4j:slf4j-log4j12</include>
+        <include>org.apache.zookeeper:zookeeper</include>
+        <include>log4j:log4j</include>
+        <include>org.apache.hbase:hbase*</include>
+        <include>net.sf.opencsv:opencsv</include>
+        <include>org.antlr:antlr</include>
+      </includes>
+    </dependencySet>
+    <!-- Separate dependency set to just pull in the jackson stuff, since it's test
+      scoped and we only include 'runtime' scoped (which includes compile) dependencies -->
+    <dependencySet>
+      <unpack>false</unpack>
+      <scope>test</scope>
+      <!-- save these dependencies to the top-level -->
+      <outputDirectory>/lib</outputDirectory>
+      <includes>
+        <include>org.codehaus.jackson:jackson-core-asl</include>
+        <include>org.codehaus.jackson:jackson-mapper-asl</include>
+      </includes>
+    </dependencySet>
+  </dependencySets>
+</assembly>
\ No newline at end of file
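
Per the fileSets and dependencySets above, the resulting tarball should carry the
module sources plus bin/, dev/, docs/, examples/, the client jar at the top level, and
the dependency jars under lib/. A quick sanity check of the bundle (path assumes a
default 3.0.0-SNAPSHOT build):

	$ tar tzf phoenix-assembly/target/phoenix-3.0.0-SNAPSHOT.tar.gz | head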

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/client-minimal.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/client-minimal.xml b/phoenix-assembly/src/build/client-minimal.xml
new file mode 100644
index 0000000..3e6e4e9
--- /dev/null
+++ b/phoenix-assembly/src/build/client-minimal.xml
@@ -0,0 +1,16 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <!-- Often clients want to use Phoenix in an existing HBase environment (they have
+    their own HBase version already built), so the standard HBase jar shouldn't be included
+    (as with the regular client jar) as it would conflict with the installed version. This
+    descriptor does the same thing as the client.xml build, but excludes the hbase stuff. -->
+  <id>client-minimal</id>
+  <formats>
+    <format>jar</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <componentDescriptors>
+    <componentDescriptor>src/build/components-minimal.xml</componentDescriptor>
+  </componentDescriptors>
+</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/client-without-hbase.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/client-without-hbase.xml b/phoenix-assembly/src/build/client-without-hbase.xml
new file mode 100644
index 0000000..2933715
--- /dev/null
+++ b/phoenix-assembly/src/build/client-without-hbase.xml
@@ -0,0 +1,18 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <!-- Often clients want to use Phoenix in an existing HBase environment (they have
+    their own HBase version already built), so the standard HBase jar shouldn't be included
+    (as with the regular client jar) as it would conflict with the installed version. This
+    descriptor does the same thing as the client.xml build, but excludes the hbase stuff. -->
+  <id>client-without-hbase</id>
+  <formats>
+    <format>jar</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+
+  <componentDescriptors>
+    <componentDescriptor>src/build/components-minimal.xml</componentDescriptor>
+    <componentDescriptor>src/build/components-major-client.xml</componentDescriptor>
+  </componentDescriptors>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/client.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/client.xml b/phoenix-assembly/src/build/client.xml
new file mode 100644
index 0000000..3be4949
--- /dev/null
+++ b/phoenix-assembly/src/build/client.xml
@@ -0,0 +1,41 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+  <id>client</id>
+  <!-- All the dependencies (unpacked) necessary to run phoenix from a single, stand-alone jar -->
+  <formats>
+    <format>jar</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  
+  <componentDescriptors>
+    <componentDescriptor>src/build/components-minimal.xml</componentDescriptor>
+    <componentDescriptor>src/build/components-major-client.xml</componentDescriptor>
+  </componentDescriptors>
+
+  <!-- Unpack all the modules into the target jar -->
+  <moduleSets>
+    <moduleSet>
+      <useAllReactorProjects>true</useAllReactorProjects>
+      <binaries>
+        <outputDirectory>/</outputDirectory>
+        <unpack>true</unpack>
+      </binaries>
+    </moduleSet>
+  </moduleSets>
+
+  <dependencySets>
+    <dependencySet>
+      <!-- Unpack all the dependencies to class files, since java doesn't support 
+        jar of jars for running -->
+      <unpack>true</unpack>
+      <!-- save these dependencies to the top-level -->
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>jline:jline</include>
+        <include>sqlline:sqlline</include>
+        <include>org.apache.hbase:hbase*</include>
+      </includes>
+    </dependencySet>
+  </dependencySets>
+</assembly>
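
Since the assembly pom above sets org.apache.phoenix.util.PhoenixRuntime as the
Main-Class of this stand-alone jar, it can be launched directly once built (version
per the current pom):

	$ java -jar phoenix-assembly/target/phoenix-3.0.0-SNAPSHOT-client.jar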

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/components-major-client.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/components-major-client.xml b/phoenix-assembly/src/build/components-major-client.xml
new file mode 100644
index 0000000..bdbf701
--- /dev/null
+++ b/phoenix-assembly/src/build/components-major-client.xml
@@ -0,0 +1,32 @@
+<component>
+  <!-- Components that the client needs (except for HBase) -->
+  <dependencySets>
+    <dependencySet>
+      <!-- Unpack all the dependencies to class files, since java doesn't support 
+        jar of jars for running -->
+      <unpack>true</unpack>
+      <!-- save these dependencies to the top-level -->
+      <outputDirectory>/</outputDirectory>
+      <!-- Maybe a blacklist is easier? -->
+      <includes>
+        <!-- We use a newer version of guava than HBase - this might be an issue? -->
+        <include>com.google.guava:guava</include>
+        <!-- HBase also pulls in these dependencies on its own; should we include them? -->
+        <include>com.google.protobuf:protobuf-java</include>
+        <include>org.slf4j:slf4j-api</include>
+        <include>org.slf4j:slf4j-log4j12</include>
+        <include>org.apache.zookeeper:zookeeper</include>
+        <include>log4j:log4j</include>
+        <include>org.apache.hadoop:hadoop*</include>
+        <include>commons-configuration:commons-configuration</include>
+        <include>commons-io:commons-io</include>
+        <include>commons-logging:commons-logging</include>
+        <include>commons-lang:commons-lang</include>
+        <include>commons-cli:commons-cli</include>
+        <include>org.codehaus.jackson:jackson-mapper-asl</include>
+        <include>org.codehaus.jackson:jackson-core-asl</include>
+        <include>org.xerial.snappy:snappy-java</include>
+      </includes>
+    </dependencySet>
+  </dependencySets>
+</component>

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-assembly/src/build/components-minimal.xml
----------------------------------------------------------------------
diff --git a/phoenix-assembly/src/build/components-minimal.xml b/phoenix-assembly/src/build/components-minimal.xml
new file mode 100644
index 0000000..172e398
--- /dev/null
+++ b/phoenix-assembly/src/build/components-minimal.xml
@@ -0,0 +1,47 @@
+<component>
+  <!-- Just the basic components that Phoenix pulls in that are not transitive dependencies from Hadoop/HBase/Pig -->
+  <dependencySets>
+    <dependencySet>
+      <!-- Unpack all the dependencies to class files, since java doesn't support 
+        jar of jars for running -->
+      <unpack>true</unpack>
+      <!-- save these dependencies to the top-level -->
+      <outputDirectory>/</outputDirectory>
+      <!-- Just include the extra things that phoenix needs -->
+      <includes>
+        <include>net.sf.opencsv:opencsv</include>
+        <include>org.antlr:antlr*</include>
+      </includes>
+    </dependencySet>
+
+    <dependencySet>
+      <outputDirectory>/</outputDirectory>
+      <unpack>true</unpack>
+      <scope>system</scope>
+    </dependencySet>
+  </dependencySets>
+
+  <fileSets>
+    <fileSet>
+      <!--Get misc project files -->
+      <directory>${project.basedir}/..</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>*.txt*</include>
+        <include>*.md</include>
+        <include>NOTICE*</include>
+      </includes>
+      <excludes>
+        <exclude>build.txt</exclude>
+      </excludes>
+    </fileSet>
+    <fileSet>
+      <!--Get map-red-config properties files -->
+      <directory>${project.basedir}/../config</directory>
+      <outputDirectory>/</outputDirectory>
+      <includes>
+        <include>csv-bulk-load-config.properties</include>
+      </includes>
+    </fileSet>
+  </fileSets>
+</component>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-core/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
new file mode 100644
index 0000000..0e2b8e5
--- /dev/null
+++ b/phoenix-core/pom.xml
@@ -0,0 +1,319 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache</groupId>
+    <artifactId>phoenix</artifactId>
+    <version>3.0.0-SNAPSHOT</version>
+  </parent>
+  <artifactId>phoenix-core</artifactId>
+  <name>Phoenix Core</name>
+  <description>Core Phoenix codebase</description>
+
+  <licenses>
+      <license>
+          <name>The Apache Software License, Version 2.0</name>
+          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+          <distribution>repo</distribution>
+          <comments />
+      </license>
+  </licenses>
+
+  <organization>
+      <name>Apache Software Foundation</name>
+      <url>http://www.apache.org</url>
+  </organization>
+
+  <build>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <targetPath>META-INF/services</targetPath>
+        <includes>
+          <include>java.sql.Driver</include>
+        </includes>
+      </resource>
+    </resources>
+
+    <plugins>
+      <!-- Add the antlr-generated sources to the source path -->
+     <plugin>
+       <groupId>org.apache.maven.plugins</groupId>
+       <artifactId>maven-site-plugin</artifactId>
+       <version>3.2</version>
+       <dependencies>
+        <dependency>
+           <groupId>org.apache.maven.doxia</groupId>
+           <artifactId>doxia-module-markdown</artifactId>
+           <version>1.3</version>
+         </dependency>
+         <dependency>
+           <groupId>lt.velykis.maven.skins</groupId>
+           <artifactId>reflow-velocity-tools</artifactId>
+           <version>1.0.0</version>
+         </dependency>
+         <dependency>
+           <groupId>org.apache.velocity</groupId>
+           <artifactId>velocity</artifactId>
+           <version>1.7</version>
+         </dependency>
+       </dependencies>
+       <configuration>
+         <reportPlugins>
+           <plugin>
+             <groupId>org.codehaus.mojo</groupId>
+             <artifactId>findbugs-maven-plugin</artifactId>
+             <version>2.5.2</version>
+           </plugin>
+         </reportPlugins>
+       </configuration>
+     </plugin>
+     <plugin>
+       <artifactId>exec-maven-plugin</artifactId>
+       <groupId>org.codehaus.mojo</groupId>
+       <version>1.2.1</version>
+       <executions>
+        <execution><!-- Run the language reference merge script -->
+          <id>Merge Language Reference</id>
+           <phase>site</phase>
+           <goals>
+             <goal>exec</goal>
+           </goals>
+           <configuration>
+             <executable>${basedir}/src/site/bin/merge.sh</executable>
+           </configuration>
+         </execution>
+       </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>${maven-build-helper-plugin.version}</version>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>${antlr-output.dir}</source>
+                <source>${antlr-input.dir}</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- Compile the antlr sources -->
+      <plugin>
+        <groupId>org.antlr</groupId>
+        <artifactId>antlr3-maven-plugin</artifactId>
+        <version>3.5</version>
+        <executions>
+          <execution>
+            <goals>
+              <goal>antlr</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <outputDirectory>${antlr-output.dir}/org/apache/phoenix/parse</outputDirectory>
+        </configuration>
+      </plugin>
+      <!-- Run with -Dmaven.test.skip.exec=true to build -tests.jar without running 
+        tests (this is needed for upstream projects whose tests need this jar simply for 
+        compilation) -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+            <configuration>
+              <archive>
+                <manifest>
+                  <mainClass>org.apache.phoenix.util.GeneratePerformanceData</mainClass>
+                </manifest>
+              </archive>
+            </configuration>
+          </execution>
+        </executions>
+        <configuration>
+          <!-- Exclude these 2 packages, because their dependency _binary_ files 
+            include the sources, and Maven 2.2 appears to add them to the sources to compile, 
+            weird -->
+          <excludes>
+            <exclude>org/apache/jute/**</exclude>
+            <exclude>org/apache/zookeeper/**</exclude>
+            <exclude>**/*.jsp</exclude>
+            <exclude>log4j.properties</exclude>
+          </excludes>
+        </configuration>
+      </plugin>
+      <!-- Setup eclipse -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-eclipse-plugin</artifactId>
+        <configuration>
+          <buildcommands>
+            <buildcommand>org.jamon.project.templateBuilder</buildcommand>
+            <buildcommand>org.eclipse.jdt.core.javabuilder</buildcommand>
+          </buildcommands>
+        </configuration>
+      </plugin>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here -->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+    <!-- Intra project dependencies -->
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-hadoop-compat</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache</groupId>
+      <artifactId>phoenix-hadoop-compat</artifactId>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <!-- Make sure we have all the antlr dependencies -->
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>antlr</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.antlr</groupId>
+      <artifactId>antlr-runtime</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>net.sf.opencsv</groupId>
+      <artifactId>opencsv</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>jline</groupId>
+      <artifactId>jline</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>sqlline</groupId>
+      <artifactId>sqlline</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+    </dependency>
+    <!-- Findbugs Annotation -->
+    <dependency>
+      <groupId>net.sourceforge.findbugs</groupId>
+      <artifactId>annotations</artifactId>
+    </dependency>
+
+    <!-- Test Dependencies -->
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase</artifactId>
+      <type>test-jar</type>
+    </dependency>
+    <!-- Needed by HBase to run the minicluster -->
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-core-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-mapper-asl</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-jaxrs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.codehaus.jackson</groupId>
+      <artifactId>jackson-xc</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+    <!-- Profile for building against Hadoop 1. Active by default. Not used if another 
+      Hadoop profile is specified with mvn -Dhadoop.profile=foo -->
+    <profile>
+      <id>hadoop-1</id>
+      <activation>
+        <property>
+          <name>!hadoop.profile</name>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-core</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-test</artifactId>
+        </dependency>
+      </dependencies>
+    </profile>
+
+    <!-- Profile for building against Hadoop 2. Activate using: mvn -Dhadoop.profile=2 -->
+    <profile>
+      <id>hadoop-2</id>
+      <activation>
+        <property>
+          <name>hadoop.profile</name>
+          <value>2</value>
+        </property>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-annotations</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-minicluster</artifactId>
+        </dependency>
+      </dependencies>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-dependency-plugin</artifactId>
+            <version>${maven-dependency-plugin.version}</version>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
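
Per the profile activation above, hadoop-1 is the default and hadoop-2 is opt-in:

	$ mvn package -DskipTests                     (builds against Hadoop 1, the default)
	$ mvn package -DskipTests -Dhadoop.profile=2  (builds against Hadoop 2)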

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-core/src/main/antlr3/PhoenixSQL.g
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/antlr3/PhoenixSQL.g b/phoenix-core/src/main/antlr3/PhoenixSQL.g
new file mode 100644
index 0000000..5dcfe81
--- /dev/null
+++ b/phoenix-core/src/main/antlr3/PhoenixSQL.g
@@ -0,0 +1,1136 @@
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+grammar PhoenixSQL;
+
+tokens
+{
+    SELECT='select';
+    FROM='from';
+    WHERE='where';
+    NOT='not';
+    AND='and';
+    OR='or';
+    NULL='null';
+    TRUE='true';
+    FALSE='false';
+    LIKE='like';
+    AS='as';
+    OUTER='outer';
+    ON='on';
+    IN='in';
+    GROUP='group';
+    HAVING='having';
+    ORDER='order';
+    BY='by';
+    ASC='asc';
+    DESC='desc';
+    NULLS='nulls';
+    LIMIT='limit';
+    FIRST='first';
+    LAST='last';
+    CASE='case';
+    WHEN='when';
+    THEN='then';
+    ELSE='else';
+    END='end';
+    EXISTS='exists';
+    IS='is';
+    DISTINCT='distinct';
+    JOIN='join';
+    INNER='inner';
+    LEFT='left';
+    RIGHT='right';
+    FULL='full';
+    BETWEEN='between';
+    UPSERT='upsert';
+    INTO='into';
+    VALUES='values';
+    DELETE='delete';
+    CREATE='create';
+    DROP='drop';
+    PRIMARY='primary';
+    KEY='key';
+    ALTER='alter';
+    COLUMN='column';
+    TABLE='table';
+    ADD='add';
+    SPLIT='split';
+    EXPLAIN='explain';
+    VIEW='view';
+    IF='if';
+    CONSTRAINT='constraint';
+    TABLES='tables';
+    ALL='all';
+    INDEX='index';
+    INCLUDE='include';
+    WITHIN='within';
+    SET='set';
+    CAST='cast';
+    USABLE='usable';
+    UNUSABLE='unusable';
+    DISABLE='disable';
+    REBUILD='rebuild';
+    ARRAY='array';
+    SEQUENCE='sequence';
+    START='start';
+    WITH='with';
+    INCREMENT='increment';
+    NEXT='next';
+    CURRENT='current';
+    VALUE='value';
+    FOR='for';
+    CACHE='cache';
+    DERIVE='derive';
+}
+
+
+@parser::header {
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.parse;
+
+///CLOVER:OFF
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.ListMultimap;
+import org.apache.hadoop.hbase.util.Pair;
+import java.math.BigDecimal;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Stack;
+import java.sql.SQLException;
+import org.apache.phoenix.expression.function.CountAggregateFunction;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.schema.ColumnModifier;
+import org.apache.phoenix.schema.IllegalDataException;
+import org.apache.phoenix.schema.PDataType;
+import org.apache.phoenix.schema.PIndexState;
+import org.apache.phoenix.schema.PTableType;
+import org.apache.phoenix.util.SchemaUtil;
+}
+
+@lexer::header {
+/**
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.parse;
+///CLOVER:OFF
+}
+
+// --------------------------------------
+// The Parser
+
+@parser::members
+{
+    
+    /**
+     * Used to turn anonymous '?' binds into sequentially numbered binds.
+     */
+    private int anonBindNum;
+    private ParseNodeFactory factory;
+    private ParseContext.Stack contextStack = new ParseContext.Stack();
+
+    public void setParseNodeFactory(ParseNodeFactory factory) {
+        this.factory = factory;
+    }
+    
+    public boolean isCountFunction(String field) {
+        return CountAggregateFunction.NORMALIZED_NAME.equals(SchemaUtil.normalizeIdentifier(field));
+    }
+     
+    public int line(Token t) {
+        return t.getLine();
+    }
+
+    public int column(Token t) {
+        return t.getCharPositionInLine() + 1;
+    }
+    
+    private void throwRecognitionException(Token t) throws RecognitionException {
+        RecognitionException e = new RecognitionException();
+        e.token = t;
+        e.line = t.getLine();
+        e.charPositionInLine = t.getCharPositionInLine();
+        e.input = input;
+        throw e;
+    }
+    
+    public int getBindCount() {
+        return anonBindNum;
+    }
+    
+    public void resetBindCount() {
+        anonBindNum = 0;
+    }
+    
+    public String nextBind() {
+        return Integer.toString(++anonBindNum);
+    }
+    
+    public void updateBind(String namedBind){
+         int nBind = Integer.parseInt(namedBind);
+         if (nBind > anonBindNum) {
+             anonBindNum = nBind;
+         }
+    }
+
+    protected Object recoverFromMismatchedToken(IntStream input, int ttype, BitSet follow)
+        throws RecognitionException {
+        RecognitionException e = null;
+        // if next token is what we are looking for then "delete" this token
+        if (mismatchIsUnwantedToken(input, ttype)) {
+            e = new UnwantedTokenException(ttype, input);
+        } else if (mismatchIsMissingToken(input, follow)) {
+            Object inserted = getMissingSymbol(input, e, ttype, follow);
+            e = new MissingTokenException(ttype, input, inserted);
+        } else {
+            e = new MismatchedTokenException(ttype, input);
+        }
+        throw e;
+    }
+
+    public Object recoverFromMismatchedSet(IntStream input, RecognitionException e, BitSet follow)
+        throws RecognitionException
+    {
+        throw e;
+    }
+    
+    @Override
+    public String getErrorMessage(RecognitionException e, String[] tokenNames) {
+        if (e instanceof MismatchedTokenException) {
+            MismatchedTokenException mte = (MismatchedTokenException)e;
+            String txt = mte.token.getText();
+            String p = mte.token.getType() == -1 ? "EOF" : PARAPHRASE[mte.token.getType()];
+            String expecting = (mte.expecting < PARAPHRASE.length && mte.expecting >= 0) ? PARAPHRASE[mte.expecting] : null;
+            if (expecting == null) {
+                return "unexpected token (" + line(mte.token) + "," + column(mte.token) + "): " + (txt != null ? txt : p);
+            } else {
+                return "expecting " + expecting +
+                    ", found '" + (txt != null ? txt : p) + "'";
+            }
+        } else if (e instanceof NoViableAltException) {
+            //NoViableAltException nvae = (NoViableAltException)e;
+            return "unexpected token: (" + line(e.token) + "," + column(e.token) + ")" + getTokenErrorDisplay(e.token);
+        }
+        return super.getErrorMessage(e, tokenNames);
+     }
+
+    public String getTokenErrorDisplay(int t) {
+        String ret = PARAPHRASE[t];
+        if (ret == null) ret = "<UNKNOWN>";
+        return ret;
+    }
+
+
+    private String[] PARAPHRASE = new String[getTokenNames().length];
+    {
+        PARAPHRASE[NAME] = "a field or entity name";
+        PARAPHRASE[NUMBER] = "a number";
+        PARAPHRASE[EQ] = "an equals sign";
+        PARAPHRASE[LT] = "a left angle bracket";
+        PARAPHRASE[GT] = "a right angle bracket";
+        PARAPHRASE[COMMA] = "a comma";
+        PARAPHRASE[LPAREN] = "a left parenthesis";
+        PARAPHRASE[RPAREN] = "a right parenthesis";
+        PARAPHRASE[SEMICOLON] = "a semi-colon";
+        PARAPHRASE[COLON] = "a colon";
+        PARAPHRASE[LSQUARE] = "left square bracket";
+        PARAPHRASE[RSQUARE] = "right square bracket";
+        PARAPHRASE[LCURLY] = "left curly bracket";
+        PARAPHRASE[RCURLY] = "right curly bracket";
+        PARAPHRASE[AT] = "at";
+        PARAPHRASE[MINUS] = "a subtraction";
+        PARAPHRASE[TILDE] = "a tilde";
+        PARAPHRASE[PLUS] = "an addition";
+        PARAPHRASE[ASTERISK] = "an asterisk";
+        PARAPHRASE[DIVIDE] = "a division";
+        PARAPHRASE[FIELDCHAR] = "a field character";
+        PARAPHRASE[LETTER] = "an ansi letter";
+        PARAPHRASE[POSINTEGER] = "a positive integer";
+        PARAPHRASE[DIGIT] = "a number from 0 to 9";
+    }
+}
+
+@rulecatch {
+    catch (RecognitionException re) {
+        throw re;
+    }
+}
+
+@lexer::members {
+
+}
+
+// Used to incrementally parse a series of semicolon-terminated SQL statements.
+// Note that unlike the rule below, an EOF is not expected at the end.
+nextStatement returns [BindableStatement ret]
+    :  s=oneStatement {$ret = s;} SEMICOLON
+    |  EOF
+    ;
+
+// Parses a single SQL statement (expects an EOF after the statement).
+statement returns [BindableStatement ret]
+    :   s=oneStatement {$ret = s;} EOF
+    ;
+
+// Parses a select statement which must be the only statement (expects an EOF after the statement).
+query returns [SelectStatement ret]
+    :   SELECT s=hinted_select_node EOF {$ret=s;}
+    ;
+
+// Parses a single SQL statement, either a select or one of the non-select statements below.
+oneStatement returns [BindableStatement ret]
+    :   (SELECT s=hinted_select_node {$ret=s;} 
+    |    ns=non_select_node {$ret=ns;}
+        )
+    ;
+
+non_select_node returns [BindableStatement ret]
+@init{ contextStack.push(new ParseContext()); }
+    :  (s=upsert_node
+    |   s=delete_node
+    |   s=create_table_node
+    |   s=create_view_node
+    |   s=create_index_node
+    |   s=drop_table_node
+    |   s=drop_index_node
+    |   s=alter_index_node
+    |   s=alter_table_node
+    |	s=create_sequence_node
+    |	s=drop_sequence_node
+    |   s=explain_node) { contextStack.pop();  $ret = s; }
+    ;
+    
+explain_node returns [BindableStatement ret]
+    :   EXPLAIN q=oneStatement {$ret=factory.explain(q);}
+    ;
+
+// Parse a create table statement.
+create_table_node returns [CreateTableStatement ret]
+    :   CREATE TABLE (IF NOT ex=EXISTS)? t=from_table_name 
+        (LPAREN c=column_defs (pk=pk_constraint)? RPAREN)
+        (p=fam_properties)?
+        (SPLIT ON s=list_expressions)?
+        {ret = factory.createTable(t, p, c, pk, s, PTableType.TABLE, ex!=null, null, null, getBindCount()); }
+    ;
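+
+// Illustrative example of the shape this rule accepts (table/column names are hypothetical):
+//   CREATE TABLE IF NOT EXISTS my_schema.event (id BIGINT NOT NULL PRIMARY KEY, host VARCHAR) SPLIT ON ('a', 'm')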
+
+// Parse a create view statement.
+create_view_node returns [CreateTableStatement ret]
+    :   CREATE VIEW (IF NOT ex=EXISTS)? t=from_table_name 
+        (LPAREN c=column_defs (pk=pk_constraint)? RPAREN)?
+        ( AS SELECT ASTERISK
+          FROM bt=from_table_name
+          (WHERE w=condition)? )?
+        (p=fam_properties)?
+        { ret = factory.createTable(t, p, c, pk, null, PTableType.VIEW, ex!=null, bt==null ? t : bt, w, getBindCount()); }
+    ;
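+
+// Illustrative example (view/table/column names are hypothetical):
+//   CREATE VIEW my_view (new_col VARCHAR) AS SELECT * FROM base_table WHERE kind = 'web'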
+
+// Parse a create index statement.
+create_index_node returns [CreateIndexStatement ret]
+    :   CREATE INDEX (IF NOT ex=EXISTS)? i=index_name ON t=from_table_name
+        (LPAREN pk=index_pk_constraint RPAREN)
+        (INCLUDE (LPAREN icrefs=column_names RPAREN))?
+        (p=fam_properties)?
+        (SPLIT ON v=list_expressions)?
+        {ret = factory.createIndex(i, factory.namedTable(null,t), pk, icrefs, v, p, ex!=null, getBindCount()); }
+    ;
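+
+// Illustrative example (index/table/column names are hypothetical):
+//   CREATE INDEX IF NOT EXISTS my_idx ON my_table (host, created_date DESC) INCLUDE (payload)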
+
+// Parse a create sequence statement.
+create_sequence_node returns [CreateSequenceStatement ret]
+    :   CREATE SEQUENCE  (IF NOT ex=EXISTS)? t=from_table_name
+        (START WITH? s=int_literal_or_bind)?
+        (INCREMENT BY? i=int_literal_or_bind)?
+        (CACHE c=int_literal_or_bind)?
+    { $ret = factory.createSequence(t, s, i, c, ex!=null, getBindCount()); }
+    ;
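+
+// Illustrative example (the sequence name is hypothetical):
+//   CREATE SEQUENCE my_schema.my_seq START WITH 100 INCREMENT BY 2 CACHE 10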
+
+int_literal_or_bind returns [ParseNode ret]
+    : n=int_literal { $ret = n; }
+    | b=bind_expression { $ret = b; }
+    ;
+
+// Parse a drop sequence statement.
+drop_sequence_node returns [DropSequenceStatement ret]
+    :   DROP SEQUENCE  (IF ex=EXISTS)? t=from_table_name
+    { $ret = factory.dropSequence(t, ex!=null, getBindCount()); }
+    ;
+
+pk_constraint returns [PrimaryKeyConstraint ret]
+    :   COMMA? CONSTRAINT n=identifier PRIMARY KEY LPAREN cols=col_name_with_mod_list RPAREN { $ret = factory.primaryKey(n,cols); }
+    ;
+
+col_name_with_mod_list returns [List<Pair<ColumnName, ColumnModifier>> ret]
+@init{ret = new ArrayList<Pair<ColumnName, ColumnModifier>>(); }
+    :   p=col_name_with_mod {$ret.add(p);}  (COMMA p = col_name_with_mod {$ret.add(p);} )*
+;
+
+col_name_with_mod returns [Pair<ColumnName, ColumnModifier> ret]
+    :   f=identifier (order=ASC|order=DESC)? {$ret = Pair.newPair(factory.columnName(f), order == null ? null : ColumnModifier.fromDDLValue(order.getText()));}
+;
+
+index_pk_constraint returns [PrimaryKeyConstraint ret]
+    :   cols = col_def_name_with_mod_list {$ret = factory.primaryKey(null, cols); }
+    ;
+
+col_def_name_with_mod_list returns [List<Pair<ColumnName, ColumnModifier>> ret]
+@init{ret = new ArrayList<Pair<ColumnName, ColumnModifier>>(); }
+    :   p=col_def_name_with_mod {$ret.add(p);}  (COMMA p = col_def_name_with_mod {$ret.add(p);} )*
+;
+
+col_def_name_with_mod returns [Pair<ColumnName, ColumnModifier> ret]
+    :   c=column_name (order=ASC|order=DESC)? {$ret = Pair.newPair(c, order == null ? null : ColumnModifier.fromDDLValue(order.getText()));}
+;
+
+fam_properties returns [ListMultimap<String,Pair<String,Object>> ret]
+@init{ret = ArrayListMultimap.<String,Pair<String,Object>>create(); }
+    :  p=fam_prop_name EQ v=prop_value {$ret.put(p.getFamilyName(),new Pair<String,Object>(p.getPropertyName(),v));}  (COMMA p=fam_prop_name EQ v=prop_value {$ret.put(p.getFamilyName(),new Pair<String,Object>(p.getPropertyName(),v));} )*
+    ;
+
+fam_prop_name returns [PropertyName ret]
+    :   propName=identifier {$ret = factory.propertyName(propName); }
+    |   familyName=identifier DOT propName=identifier {$ret = factory.propertyName(familyName, propName); }
+    ;
+    
+prop_value returns [Object ret]
+    :   l=literal { $ret = l.getValue(); }
+    ;
+    
+column_name returns [ColumnName ret]
+    :   field=identifier {$ret = factory.columnName(field); }
+    |   family=identifier DOT field=identifier {$ret = factory.columnName(family, field); }
+    ;
+
+column_names returns [List<ColumnName> ret]
+@init{ret = new ArrayList<ColumnName>(); }
+    :  v = column_name {$ret.add(v);}  (COMMA v = column_name {$ret.add(v);} )*
+;
+
+// Parse a drop table statement.
+drop_table_node returns [DropTableStatement ret]
+    :   DROP (v=VIEW | TABLE) (IF ex=EXISTS)? t=from_table_name
+        {ret = factory.dropTable(t, v==null ? PTableType.TABLE : PTableType.VIEW, ex!=null); }
+    ;
+
+// Parse a drop index statement.
+drop_index_node returns [DropIndexStatement ret]
+    : DROP INDEX (IF ex=EXISTS)? i=index_name ON t=from_table_name
+      {ret = factory.dropIndex(i, t, ex!=null); }
+    ;
+
+// Parse an alter index statement.
+alter_index_node returns [AlterIndexStatement ret]
+    : ALTER INDEX (IF ex=EXISTS)? i=index_name ON t=from_table_name s=(USABLE | UNUSABLE | REBUILD | DISABLE)
+      {ret = factory.alterIndex(factory.namedTable(null,factory.table(t.getSchemaName(),i.getName())), t.getTableName(), ex!=null, PIndexState.valueOf(SchemaUtil.normalizeIdentifier(s.getText()))); }
+    ;
+
+// Parse an alter table statement.
+alter_table_node returns [AlterTableStatement ret]
+    :   ALTER (TABLE | v=VIEW) t=from_table_name
+        ( (DROP COLUMN (IF ex=EXISTS)? c=column_names) | (ADD (IF NOT ex=EXISTS)? (d=column_defs) (p=properties)?) | (SET (p=properties)) )
+        { PTableType tt = v==null ? PTableType.TABLE : PTableType.VIEW; ret = ( c == null ? factory.addColumn(factory.namedTable(null,t), tt, d, ex!=null, p) : factory.dropColumn(factory.namedTable(null,t), tt, c, ex!=null) ); }
+    ;
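+
+// Illustrative examples (table/column names are hypothetical):
+//   ALTER TABLE my_table ADD IF NOT EXISTS new_col VARCHAR
+//   ALTER TABLE my_table DROP COLUMN old_col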
+
+prop_name returns [String ret]
+    :   p=identifier {$ret = SchemaUtil.normalizeIdentifier(p); }
+    ;
+    
+properties returns [Map<String,Object> ret]
+@init{ret = new HashMap<String,Object>(); }
+    :  k=prop_name EQ v=prop_value {$ret.put(k,v);}  (COMMA k=prop_name EQ v=prop_value {$ret.put(k,v);} )*
+    ;
+
+column_defs returns [List<ColumnDef> ret]
+@init{ret = new ArrayList<ColumnDef>(); }
+    :  v = column_def {$ret.add(v);}  (COMMA v = column_def {$ret.add(v);} )*
+;
+
+column_def returns [ColumnDef ret]
+    :   c=column_name dt=identifier (LPAREN l=NUMBER (COMMA s=NUMBER)? RPAREN)? ar=ARRAY? (lsq=LSQUARE (a=NUMBER)? RSQUARE)? (n=NOT? NULL)? (pk=PRIMARY KEY (order=ASC|order=DESC)?)?
+        { $ret = factory.columnDef(c, dt, ar != null || lsq != null, a == null ? null :  Integer.parseInt( a.getText() ), n==null, 
+            l == null ? null : Integer.parseInt( l.getText() ),
+            s == null ? null : Integer.parseInt( s.getText() ),
+            pk != null, 
+            order == null ? null : ColumnModifier.fromDDLValue(order.getText()) ); }
+    ;
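+
+// Illustrative column definitions this rule accepts (column names are hypothetical):
+//   name VARCHAR(50) NOT NULL
+//   scores INTEGER ARRAY[10]
+//   id CHAR(15) PRIMARY KEY DESC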
+
+dyn_column_defs returns [List<ColumnDef> ret]
+@init{ret = new ArrayList<ColumnDef>(); }
+    :  v = dyn_column_def {$ret.add(v);}  (COMMA v = dyn_column_def {$ret.add(v);} )*
+;
+
+dyn_column_def returns [ColumnDef ret]
+    :   c=column_name dt=identifier (LPAREN l=NUMBER (COMMA s=NUMBER)? RPAREN)? (lsq=LSQUARE (a=NUMBER)? RSQUARE)?
+        {$ret = factory.columnDef(c, dt, true,
+            l == null ? null : Integer.parseInt( l.getText() ),
+            s == null ? null : Integer.parseInt( s.getText() ),
+            false, 
+            null); }
+    ;
+
+dyn_column_name_or_def returns [ColumnDef ret]
+    :   c=column_name (dt=identifier (LPAREN l=NUMBER (COMMA s=NUMBER)? RPAREN)? )? (lsq=LSQUARE (a=NUMBER)? RSQUARE)?
+        {$ret = factory.columnDef(c, dt, true,
+            l == null ? null : Integer.parseInt( l.getText() ),
+            s == null ? null : Integer.parseInt( s.getText() ),
+            false, 
+            null); }
+    ;
+
+select_expression returns [SelectStatement ret]
+    :  SELECT s=select_node {$ret = s;}
+    ;
+    
+subquery_expression returns [ParseNode ret]
+    :  s=select_expression {$ret = factory.subquery(s);}
+    ;
+    
+// Parse a full select expression structure.
+select_node returns [SelectStatement ret]
+@init{ contextStack.push(new ParseContext()); }
+    :   (d=DISTINCT | ALL)? sel=select_list
+        FROM from=parseFrom
+        (WHERE where=condition)?
+        (GROUP BY group=group_by)?
+        (HAVING having=condition)?
+        (ORDER BY order=order_by)?
+        (LIMIT l=limit)?
+        { ParseContext context = contextStack.pop(); $ret = factory.select(from, null, d!=null, sel, where, group, having, order, l, getBindCount(), context.isAggregate()); }
+    ;
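+
+// Illustrative example (the SELECT keyword itself is consumed by the calling rule; names are hypothetical):
+//   host, count(*) FROM event WHERE created_date > ? GROUP BY host HAVING count(*) > 10 ORDER BY host LIMIT 100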
+
+// Parse a full select expression structure.
+hinted_select_node returns [SelectStatement ret]
+@init{ contextStack.push(new ParseContext()); }
+    :   (hint=hintClause)? 
+        s=select_node
+        { $ret = factory.select(s, hint); }
+    ;
+
+// Parse a full upsert expression structure.
+upsert_node returns [UpsertStatement ret]
+    :   UPSERT (hint=hintClause)? INTO t=from_table_name
+        (LPAREN p=upsert_column_refs RPAREN)?
+        ((VALUES LPAREN v=expression_terms RPAREN) | s=select_expression)
+        {ret = factory.upsert(factory.namedTable(null,t,p == null ? null : p.getFirst()), hint, p == null ? null : p.getSecond(), v, s, getBindCount()); }
+    ;
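+
+// Illustrative examples (table/column names are hypothetical):
+//   UPSERT INTO event (id, host) VALUES (?, 'h1')
+//   UPSERT INTO event_copy SELECT id, host FROM event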
+
+upsert_column_refs returns [Pair<List<ColumnDef>,List<ColumnName>> ret]
+@init{ret = new Pair<List<ColumnDef>,List<ColumnName>>(new ArrayList<ColumnDef>(), new ArrayList<ColumnName>()); }
+    :  d=dyn_column_name_or_def { if (d.getDataType()!=null) { $ret.getFirst().add(d); } $ret.getSecond().add(d.getColumnDefName()); } 
+       (COMMA d=dyn_column_name_or_def { if (d.getDataType()!=null) { $ret.getFirst().add(d); } $ret.getSecond().add(d.getColumnDefName()); } )*
+;
+
+// Parse a full delete expression structure.
+delete_node returns [DeleteStatement ret]
+    :   DELETE (hint=hintClause)? FROM t=from_table_name
+        (WHERE v=condition)?
+        (ORDER BY order=order_by)?
+        (LIMIT l=limit)?
+        {ret = factory.delete(factory.namedTable(null,t), hint, v, order, l, getBindCount()); }
+    ;
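+
+// Illustrative example (table/column names are hypothetical):
+//   DELETE FROM event WHERE host = ? ORDER BY created_date LIMIT 10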
+
+limit returns [LimitNode ret]
+    : b=bind_expression { $ret = factory.limit(b); }
+    | l=int_literal { $ret = factory.limit(l); }
+    ;
+    
+hintClause returns [HintNode ret]
+    :  c=ML_HINT { $ret = factory.hint(c.getText()); }
+    ;
+
+// Parse the column/expression select list part of a select.
+select_list returns [List<AliasedNode> ret]
+@init{ret = new ArrayList<AliasedNode>();}
+    :   n=selectable {ret.add(n);} (COMMA n=selectable {ret.add(n);})*
+    |   ASTERISK { $ret = Collections.<AliasedNode>singletonList(factory.aliasedNode(null, factory.wildcard()));} // i.e. the '*' in 'select * from'
+    ;
+
+// Parse either a select field or a sub select.
+selectable returns [AliasedNode ret]
+    :   field=expression (a=parseAlias)? { $ret = factory.aliasedNode(a, field); }
+    |   familyName=identifier DOT ASTERISK { $ret = factory.aliasedNode(null, factory.family(familyName));} // i.e. the 'cf.*' in 'select cf.* from', where cf is a column family of an HBase table
+    ;
+
+
+// Parse a group by statement
+group_by returns [List<ParseNode> ret]
+@init{ret = new ArrayList<ParseNode>();}
+    :   expr=expression { ret.add(expr); }
+        (COMMA expr = expression {ret.add(expr); })*
+    ;
+
+// Parse an order by statement
+order_by returns [List<OrderByNode> ret]
+@init{ret = new ArrayList<OrderByNode>();}
+    :   field=parseOrderByField { ret.add(field); }
+        (COMMA field = parseOrderByField {ret.add(field); })*
+    ;
+
+// Parse an individual field of an order by clause.
+parseOrderByField returns [OrderByNode ret]
+@init{boolean isAscending = true; boolean nullsLast = false;}
+    :   (expr = expression)
+        (ASC {isAscending = true;} | DESC {isAscending = false;})?
+        (NULLS (FIRST {nullsLast = false;} | LAST {nullsLast = true;}))?
+        { $ret = factory.orderBy(expr, nullsLast, isAscending); }
+    ;
+
+parseFrom returns [List<TableNode> ret]
+@init{ret = new ArrayList<TableNode>(4); }
+    :   t=table_ref {$ret.add(t);} (s=sub_table_ref { $ret.add(s); })*
+    ;
+    
+sub_table_ref returns [TableNode ret]
+    :   COMMA t=table_ref { $ret = t; }
+    |   t=join_spec { $ret = t; }
+    ;
+
+table_ref returns [TableNode ret]
+    :   n=bind_name ((AS)? alias=identifier)? { $ret = factory.bindTable(alias, factory.table(null,n)); } // TODO: review
+    |   t=from_table_name ((AS)? alias=identifier)? (LPAREN cdefs=dyn_column_defs RPAREN)? { $ret = factory.namedTable(alias,t,cdefs); }
+    |   LPAREN SELECT s=hinted_select_node RPAREN ((AS)? alias=identifier)? { $ret = factory.derivedTable(alias, s); }
+    ;
+
+join_spec returns [TableNode ret]
+    :   j=join_type JOIN t=table_ref ON e=condition { $ret = factory.join(j, e, t); }
+    ;
+
+join_type returns [JoinTableNode.JoinType ret]
+    :   INNER?   { $ret = JoinTableNode.JoinType.Inner; }
+    |   LEFT OUTER?   { $ret = JoinTableNode.JoinType.Left; }
+    |   RIGHT OUTER?  { $ret = JoinTableNode.JoinType.Right; }
+    |   FULL  OUTER?  { $ret = JoinTableNode.JoinType.Full; }
+    ;
+    
+parseAlias returns [String ret]
+    :   AS? alias=parseNoReserved { $ret = alias; }
+    ;
+
+// Parse a condition, such as one used in a where clause: either a basic expression, or an OR of (single or AND'd) expressions.
+condition returns [ParseNode ret]
+    :   e=condition_or { $ret = e; }
+    ;
+
+// A set of OR'd simple expressions
+condition_or returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=condition_and {l.add(i);} (OR i=condition_and {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.or(l); }
+    ;
+
+// A set of AND'd simple expressions
+condition_and returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=condition_not {l.add(i);} (AND i=condition_not {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.and(l); }
+    ;
+
+// NOT or parenthesized condition
+condition_not returns [ParseNode ret]
+    :   (NOT? boolean_expr ) => n=NOT? e=boolean_expr { $ret = n == null ? e : factory.not(e); }
+    |   n=NOT? LPAREN e=condition RPAREN { $ret = n == null ? e : factory.not(e); }
+    ;
+
+boolean_expr returns [ParseNode ret]
+    :   l=expression ((EQ r=expression {$ret = factory.equal(l,r); } )
+                  |  ((NOEQ1 | NOEQ2) r=expression {$ret = factory.notEqual(l,r); } )
+                  |  (LT r=expression {$ret = factory.lt(l,r); } )
+                  |  (GT r=expression {$ret = factory.gt(l,r); } )
+                  |  (LT EQ r=expression {$ret = factory.lte(l,r); } )
+                  |  (GT EQ r=expression {$ret = factory.gte(l,r); } )
+                  |  (IS n=NOT? NULL {$ret = factory.isNull(l,n!=null); } )
+                  |  ( n=NOT? ((LIKE r=expression {$ret = factory.like(l,r,n!=null); } )
+                      |        (EXISTS LPAREN r=subquery_expression RPAREN {$ret = factory.exists(l,r,n!=null);} )
+                      |        (BETWEEN r1=expression AND r2=expression {$ret = factory.between(l,r1,r2,n!=null); } )
+                      |        ((IN ((r=bind_expression {$ret = factory.inList(Arrays.asList(l,r),n!=null);} )
+                                | (LPAREN r=subquery_expression RPAREN {$ret = factory.in(l,r,n!=null);} )
+                                | (v=list_expressions {List<ParseNode> il = new ArrayList<ParseNode>(v.size() + 1); il.add(l); il.addAll(v); $ret = factory.inList(il,n!=null);})
+                                )))
+                      ))
+                   |  { $ret = l; } )
+    ;
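+
+// Illustrative predicates this rule accepts (column names and values are hypothetical):
+//   host = 'h1'            host <> 'h1'         created_date IS NOT NULL
+//   id BETWEEN 1 AND 10    host LIKE 'h%'       id IN (1, 2, 3)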
+
+bind_expression  returns [BindParseNode ret]
+    :   b=bind_name { $ret = factory.bind(b); }
+    ;
+    
+expression returns [ParseNode ret]
+    :   i=expression_add { $ret = i; }
+    ;
+
+expression_add returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=expression_sub {l.add(i);} (PLUS i=expression_sub {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.add(l); }
+    ;
+
+expression_sub returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=expression_concat {l.add(i);} (MINUS i=expression_concat {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.subtract(l); }
+    ;
+
+expression_concat returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=expression_mult {l.add(i);} (CONCAT i=expression_mult {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.concat(l); }
+    ;
+
+expression_mult returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=expression_div {l.add(i);} (ASTERISK i=expression_div {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.multiply(l); }
+    ;
+
+expression_div returns [ParseNode ret]
+@init{List<ParseNode> l = new ArrayList<ParseNode>(4); }
+    :   i=expression_negate {l.add(i);} (DIVIDE i=expression_negate {l.add(i);})* { $ret = l.size() == 1 ? l.get(0) : factory.divide(l); }
+    ;
+
+expression_negate returns [ParseNode ret]
+    :   m=MINUS? e=expression_term { $ret = m==null ? e : factory.negate(e); }
+    ;
+
+// The lowest-level term, which includes literals and binds, as well as parenthesized expressions, functions, and case statements.
+expression_term returns [ParseNode ret]
+    :   e=literal_or_bind_value { $ret = e; }
+    |   e=arrayable_expression_term (LSQUARE s=expression RSQUARE)?  { if (s == null) { $ret = e; } else { $ret = factory.arrayElemRef(Arrays.<ParseNode>asList(e,s)); } } 
+	;
+	    
+arrayable_expression_term returns [ParseNode ret]
+    :   field=identifier { $ret = factory.column(null,field,field); }
+    |   ex=ARRAY LSQUARE v=expression_terms RSQUARE {$ret = factory.upsertStmtArrayNode(v);}
+    |   tableName=table_name DOT field=identifier { $ret = factory.column(tableName, field, field); }
+    |   field=identifier LPAREN l=expression_list RPAREN wg=(WITHIN GROUP LPAREN ORDER BY l2=expression_terms (a=ASC | DESC) RPAREN)?
+        {
+            FunctionParseNode f = wg==null ? factory.function(field, l) : factory.function(field,l,l2,a!=null);
+            contextStack.peek().setAggregate(f.isAggregate());
+            $ret = f;
+        } 
+    |   field=identifier LPAREN t=ASTERISK RPAREN 
+        {
+            if (!isCountFunction(field)) {
+                throwRecognitionException(t); 
+            }
+            FunctionParseNode f = factory.function(field, LiteralParseNode.STAR);
+            contextStack.peek().setAggregate(f.isAggregate()); 
+            $ret = f;
+        } 
+    |   field=identifier LPAREN t=DISTINCT l=expression_list RPAREN 
+        {
+            FunctionParseNode f = factory.functionDistinct(field, l);
+            contextStack.peek().setAggregate(f.isAggregate());
+            $ret = f;
+        }
+    |   e=case_statement { $ret = e; }
+    |   LPAREN l=expression_terms RPAREN 
+    	{ 
+    		if(l.size() == 1) {
+    			$ret = l.get(0);
+    		}	
+    		else {
+    			$ret = factory.rowValueConstructor(l);
+    		}	 
+    	}
+    |   CAST e=expression AS dt=identifier { $ret = factory.cast(e, dt); }
+    |   (n=NEXT | CURRENT) VALUE FOR s=from_table_name { $ret = n==null ? factory.currentValueFor(s) : factory.nextValueFor(s);}    
+    ;
+
+expression_terms returns [List<ParseNode> ret]
+@init{ret = new ArrayList<ParseNode>(); }
+    :  e = expression {$ret.add(e);}  (COMMA e = expression {$ret.add(e);} )*
+;
+
+expression_list returns [List<ParseNode> ret]
+@init{ret = new ArrayList<ParseNode>(); }
+    :  (v = expression {$ret.add(v);})?  (COMMA v = expression {$ret.add(v);} )*
+;
+
+index_name returns [NamedNode ret]
+    :   name=identifier {$ret = factory.indexName(name); }
+    ;
+
+// TODO: figure out how not to repeat this twice
+table_name returns [TableName ret]
+    :   t=identifier {$ret = factory.table(null, t); }
+    |   s=identifier DOT t=identifier {$ret = factory.table(s, t); }
+    ;
+
+// TODO: figure out how not to repeat this twice
+from_table_name returns [TableName ret]
+    :   t=identifier {$ret = factory.table(null, t); }
+    |   s=identifier DOT t=identifier {$ret = factory.table(s, t); }
+    ;
+    
+// A literal value or a bind variable.
+literal_or_bind_value returns [ParseNode ret]
+    :   e=literal { $ret = e; }
+    |   b=bind_name { $ret = factory.bind(b); }    
+    ;
+
+// Get a string, integer, long, double, decimal, boolean, or NULL value.
+literal returns [LiteralParseNode ret]
+    :   t=STRING_LITERAL { ret = factory.literal(t.getText()); }
+    |   l=int_literal { ret = l; }
+    |   l=long_literal { ret = l; }
+    |   l=double_literal { ret = l; }
+    |   t=DECIMAL {
+            try {
+                ret = factory.literal(new BigDecimal(t.getText()));
+            } catch (NumberFormatException e) { // Shouldn't happen since we just parsed a decimal
+                throwRecognitionException(t);
+            }
+        }
+    |   NULL {ret = factory.literal(null);}
+    |   TRUE {ret = factory.literal(Boolean.TRUE);} 
+    |   FALSE {ret = factory.literal(Boolean.FALSE);}
+    ;
+    
+int_literal returns [LiteralParseNode ret]
+    :   n=NUMBER {
+            try {
+                Long v = Long.valueOf(n.getText());
+                if (v >= Integer.MIN_VALUE && v <= Integer.MAX_VALUE) {
+                    ret = factory.literal(v.intValue());
+                } else {
+                    ret = factory.literal(v);
+                }
+            } catch (NumberFormatException e) { // Shouldn't happen since we just parsed a number
+                throwRecognitionException(n);
+            }
+        }
+    ;
+
+long_literal returns [LiteralParseNode ret]
+    :   l=LONG {
+            try {
+                String lt = l.getText();
+                Long v = Long.valueOf(lt.substring(0, lt.length() - 1));
+                ret = factory.literal(v);
+            } catch (NumberFormatException e) { // Shouldn't happen since we just parsed a number
+                throwRecognitionException(l);
+            }
+        }
+    ;
+
+double_literal returns [LiteralParseNode ret]
+    :   d=DOUBLE {
+            try {
+                String dt = d.getText();
+                Double v = Double.valueOf(dt.substring(0, dt.length() - 1));
+                ret = factory.literal(v);
+            } catch (NumberFormatException e) { // Shouldn't happen since we just parsed a number
+                throwRecognitionException(d);
+            }
+        }
+    ;
+
+list_expressions returns [List<ParseNode> ret]
+@init{ret = new ArrayList<ParseNode>(); }
+    :   LPAREN  v = expression {$ret.add(v);}  (COMMA v = expression {$ret.add(v);} )* RPAREN
+;
+
+// Parse a table name, which might be a bind name.
+table returns [String ret]
+    :   b=bind_name { $ret = b; }
+    |   n=parseNoReserved { $ret = n; }
+    ;
+
+// Bind names are a colon followed by one or more letters/digits/underscores, or '?' (unclear how Oracle actually deals with this, but we'll just treat it as a special bind)
+bind_name returns [String ret]
+    :   bname=BIND_NAME { String bnameStr = bname.getText().substring(1); updateBind(bnameStr); $ret = bnameStr; } 
+    |   QUESTION { $ret = nextBind(); } // TODO: only support this?
+    ;
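+
+// Illustrative bind references: ':1' and ':name' are named binds; '?' is a positional bind.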
+
+// Parse an identifier; the underlying token carries line and column information.
+identifier returns [String ret]
+    :   c=parseNoReserved { $ret = c; }
+    ;
+
+parseNoReserved returns [String ret]
+    :   n=NAME { $ret = n.getText(); }
+    ;
+
+case_statement returns [ParseNode ret]
+@init{List<ParseNode> w = new ArrayList<ParseNode>(4);}
+    : CASE e1=expression (WHEN e2=expression THEN t=expression {w.add(t);w.add(factory.equal(e1,e2));})+ (ELSE el=expression {w.add(el);})? END {$ret = factory.caseWhen(w);}
+    | CASE (WHEN c=condition THEN t=expression {w.add(t);w.add(c);})+ (ELSE el=expression {w.add(el);})? END {$ret = factory.caseWhen(w);}
+    ;
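+
+// Illustrative examples (column names and values are hypothetical):
+//   CASE status WHEN 1 THEN 'active' ELSE 'inactive' END
+//   CASE WHEN id > 100 THEN 'big' ELSE 'small' END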
+
+// --------------------------------------
+// The Lexer
+
+HINT_START: '/*+' ;
+COMMENT_START: '/*';
+COMMENT_AND_HINT_END: '*/' ;
+SL_COMMENT1: '//';
+SL_COMMENT2: '--';
+
+// Bind names start with a colon followed by one or more letters/digits/underscores
+BIND_NAME
+    : COLON (LETTER|DIGIT|'_')+
+    ;
+
+// A name is a letter followed by zero or more field characters, optionally followed by a
+// double-quoted part; a name may also be entirely double-quoted.
+NAME
+    :    LETTER (FIELDCHAR)* ('\"' (DBL_QUOTE_CHAR)* '\"')?
+    |    '\"' (DBL_QUOTE_CHAR)* '\"'
+    ;
+
+// An unsigned integer number (negation is parsed as a separate MINUS token)
+NUMBER
+    :   POSINTEGER
+    ;
+
+LONG
+    :   POSINTEGER ('L'|'l')
+    ;
+
+// Exponential format is not supported.
+DECIMAL
+    :   POSINTEGER? '.' POSINTEGER
+    ;
+
+DOUBLE
+    :   DECIMAL ('D'|'d')
+    ;
+
+DOUBLE_QUOTE
+    :   '"'
+    ;
+
+EQ
+    :   '='
+    ;
+
+LT
+    :   '<'
+    ;
+
+GT
+    :   '>'
+    ;
+
+DOUBLE_EQ
+    :   '=''='
+    ;
+
+NOEQ1
+    :   '!''='
+    ;
+
+NOEQ2
+    :   '<''>'
+    ;
+
+CONCAT
+    :   '|''|'
+    ;
+
+COMMA
+    :   ','
+    ;
+
+LPAREN
+    :   '('
+    ;
+
+RPAREN
+    :   ')'
+    ;
+
+SEMICOLON
+    :   ';'
+    ;
+
+COLON
+    :   ':'
+    ;
+
+QUESTION
+    :   '?'
+    ;
+
+LSQUARE
+    :   '['
+    ;
+
+RSQUARE
+    :   ']'
+    ;
+
+LCURLY
+    :   '{'
+    ;
+
+RCURLY
+    :   '}'
+    ;
+
+AT
+    :   '@'
+    ;
+
+TILDE
+    :   '~'
+    ;
+
+PLUS
+    :   '+'
+    ;
+
+MINUS
+    :   '-'
+    ;
+
+ASTERISK
+    :   '*'
+    ;
+
+DIVIDE
+    :   '/'
+    ;
+
+OUTER_JOIN
+    : '(' '+' ')'
+    ;
+
+// A FIELDCHAR is a letter, digit, underscore, or any character in the range \u0080-\ufffe.
+fragment
+FIELDCHAR
+    :    LETTER
+    |    DIGIT
+    |    '_'
+    |    '\u0080'..'\ufffe'
+    ;
+
+// A LETTER is a lowercase or uppercase ASCII letter.
+fragment
+LETTER
+    :    'a'..'z'
+    |    'A'..'Z'
+    ;
+
+fragment
+POSINTEGER
+    :   DIGIT+
+    ;
+
+fragment
+DIGIT
+    :    '0'..'9'
+    ;
+
+// string literals
+STRING_LITERAL
+@init{ StringBuilder sb = new StringBuilder(); }
+    :   '\''
+    ( t=CHAR { sb.append(t.getText()); }
+    | t=CHAR_ESC { sb.append(getText()); }
+    )* '\'' { setText(sb.toString()); }
+    ;
+
+fragment
+CHAR
+    :   ( ~('\'' | '\\') )+
+    ;
+
+fragment
+DBL_QUOTE_CHAR
+    :   ( ~('\"') )+
+    ;
+
+// escape sequence inside a string literal
+fragment
+CHAR_ESC
+    :   '\\'
+        ( 'n'   { setText("\n"); }
+        | 'r'   { setText("\r"); }
+        | 't'   { setText("\t"); }
+        | 'b'   { setText("\b"); }
+        | 'f'   { setText("\f"); }
+        | '\"'  { setText("\""); }
+        | '\''  { setText("\'"); }
+        | '\\'  { setText("\\"); }
+        | '_'   { setText("\\_"); }
+        | '%'   { setText("\\\%"); }
+        )
+    |   '\'\''  { setText("\'"); }
+    ;
+
+// whitespace (skip)
+WS
+    :   ( ' ' | '\t' ) { $channel=HIDDEN; }
+    ;
+    
+EOL
+    :  ('\r' | '\n')
+    { skip(); }
+    ;
+
+// Keep everything in the hint comment case-sensitive
+ML_HINT
+@init{ StringBuilder sb = new StringBuilder(); }
+    : h=HINT_START ( options {greedy=false;} : t=.)*  { sb.append($text); }  COMMENT_AND_HINT_END
+    { setText(sb.substring(h.getText().length())); } // Get rid of the HINT_START text
+    ;
+
+ML_COMMENT
+    : COMMENT_START (~PLUS) ( options {greedy=false;} : . )* COMMENT_AND_HINT_END
+    { skip(); }
+    ;
+
+SL_COMMENT
+    : (SL_COMMENT1 | SL_COMMENT2) ( options {greedy=false;} : . )* EOL
+    { skip(); }
+    ;
+
+DOT
+    : '.'
+    ;
+

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/50d523f6/phoenix-core/src/main/java/org/apache/hadoop/hbase/index/CapturingAbortable.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/hadoop/hbase/index/CapturingAbortable.java b/phoenix-core/src/main/java/org/apache/hadoop/hbase/index/CapturingAbortable.java
new file mode 100644
index 0000000..5de5428
--- /dev/null
+++ b/phoenix-core/src/main/java/org/apache/hadoop/hbase/index/CapturingAbortable.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2010 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.index;
+
+import org.apache.hadoop.hbase.Abortable;
+
+/**
+ * {@link Abortable} that can rethrow the cause of the abort.
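+ * <p>
+ * A minimal usage sketch (the delegate and the surrounding work are hypothetical):
+ * <pre>
+ * CapturingAbortable abortable = new CapturingAbortable(delegate);
+ * doIndexWork(abortable); // hypothetical work that may call abortable.abort(...)
+ * abortable.throwCauseIfAborted(); // rethrows the original cause of the abort, if any
+ * </pre>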
+ */
+public class CapturingAbortable implements Abortable {
+
+  private final Abortable delegate;
+  private Throwable cause;
+  private String why;
+
+  public CapturingAbortable(Abortable delegate) {
+    this.delegate = delegate;
+  }
+
+  @Override
+  public void abort(String why, Throwable e) {
+    if (delegate.isAborted()) {
+      return;
+    }
+    this.why = why;
+    this.cause = e;
+    delegate.abort(why, e);
+  }
+
+  @Override
+  public boolean isAborted() {
+    return delegate.isAborted();
+  }
+
+  /**
+   * Throw the cause of the abort, if <tt>this</tt> was aborted. If there was an exception causing
+   * the abort, rethrows it. Otherwise, throws a generic {@link Exception} with the reason the
+   * abort was requested.
+   * @throws Throwable the cause of the abort.
+   */
+  public void throwCauseIfAborted() throws Throwable {
+    if (!this.isAborted()) {
+      return;
+    }
+    if (cause == null) {
+      throw new Exception(why);
+    }
+    throw cause;
+  }
+}
\ No newline at end of file