Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/12 03:21:29 UTC

svn commit: r1522098 [2/30] - in /hive/branches/vectorization: ./ beeline/src/test/org/apache/hive/beeline/src/test/ bin/ bin/ext/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/conf/ conf/ contrib/src/java/org/ap...

Propchange: hive/branches/vectorization/
------------------------------------------------------------------------------
  Merged /hive/trunk:r1513660-1522095

Modified: hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java (original)
+++ hive/branches/vectorization/beeline/src/test/org/apache/hive/beeline/src/test/TestBeeLineWithArgs.java Thu Sep 12 01:21:10 2013
@@ -40,7 +40,9 @@ import org.junit.Test;
 public class TestBeeLineWithArgs {
 
   // Default location of HiveServer2
-  final static String JDBC_URL = BeeLine.BEELINE_DEFAULT_JDBC_URL + "localhost:10000";
+  final static String BASE_JDBC_URL = BeeLine.BEELINE_DEFAULT_JDBC_URL + "localhost:10000";
+  // Set JDBC_URL to something else in a test case if it needs to be customized.
+  String JDBC_URL = BASE_JDBC_URL;
 
   private static HiveServer2 hiveServer2;
 
@@ -100,7 +102,7 @@ public class TestBeeLineWithArgs {
    * in the output (stdout or stderr), fail if not found
    * Print PASSED or FAILED
   * @param testName Name of test to print
-   * @param expecttedPattern Text to look for in command output
+   * @param expectedPattern Text to look for in command output/error
    * @param shouldMatch true if the pattern should be found, false if it should not
    * @throws Exception on command execution error
    */
@@ -239,4 +241,13 @@ public class TestBeeLineWithArgs {
     System.out.println(">>> PASSED " + "testNPE" );
   }
 
+  @Test
+  public void testHiveVarSubstitution() throws Throwable {
+    JDBC_URL = BASE_JDBC_URL + "#D_TBL=dummy_t";
+    final String TEST_NAME = "testHiveVarSubstitution";
+    final String SCRIPT_TEXT = "create table ${D_TBL} (d int);\nshow tables;\n";
+    final String EXPECTED_PATTERN = "dummy_t";
+    testScriptFile(TEST_NAME, SCRIPT_TEXT, EXPECTED_PATTERN, true);
+  }
+
 }
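
The new test relies on BeeLine treating variables in the JDBC URL fragment
(everything after '#') as hive variables, which the script then references as
${D_TBL}. Below is a minimal, self-contained sketch of that substitution; the
class and helper names are hypothetical, not BeeLine's actual implementation:

    import java.util.HashMap;
    import java.util.Map;

    public class VarSubstitutionSketch {
      // Parse "jdbc:hive2://host:port#K1=V1,K2=V2" into {K1: V1, K2: V2}.
      static Map<String, String> parseFragment(String jdbcUrl) {
        Map<String, String> vars = new HashMap<String, String>();
        int hash = jdbcUrl.indexOf('#');
        if (hash < 0) {
          return vars;
        }
        for (String kv : jdbcUrl.substring(hash + 1).split(",")) {
          String[] parts = kv.split("=", 2);
          if (parts.length == 2) {
            vars.put(parts[0], parts[1]);
          }
        }
        return vars;
      }

      // Replace each ${VAR} occurrence in the script with its value.
      static String substitute(String script, Map<String, String> vars) {
        for (Map.Entry<String, String> e : vars.entrySet()) {
          script = script.replace("${" + e.getKey() + "}", e.getValue());
        }
        return script;
      }

      public static void main(String[] args) {
        Map<String, String> vars =
            parseFragment("jdbc:hive2://localhost:10000#D_TBL=dummy_t");
        // Prints: create table dummy_t (d int);
        System.out.println(substitute("create table ${D_TBL} (d int);", vars));
      }
    }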

Modified: hive/branches/vectorization/bin/ext/metatool.sh
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/bin/ext/metatool.sh?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/bin/ext/metatool.sh (original)
+++ hive/branches/vectorization/bin/ext/metatool.sh Thu Sep 12 01:21:10 2013
@@ -17,12 +17,13 @@ THISSERVICE=metatool
 export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
 
 metatool () {
-
+  HIVE_OPTS=''
   CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
   execHiveCmd $CLASS "$@"
 }
 
 metatool_help () {
+  HIVE_OPTS=''
   CLASS=org.apache.hadoop.hive.metastore.tools.HiveMetaTool
   execHiveCmd $CLASS "--help"
 }

Modified: hive/branches/vectorization/bin/hive
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/bin/hive?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/bin/hive (original)
+++ hive/branches/vectorization/bin/hive Thu Sep 12 01:21:10 2013
@@ -110,8 +110,8 @@ if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
     fi
   done
 elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then 
+  HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
   if $cygwin; then
-      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
       HIVE_AUX_JARS_PATH=`cygpath -p -w "$HIVE_AUX_JARS_PATH"`
       HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/;/,/g'`
   fi

Modified: hive/branches/vectorization/build-common.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/build-common.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/build-common.xml (original)
+++ hive/branches/vectorization/build-common.xml Thu Sep 12 01:21:10 2013
@@ -298,8 +298,6 @@
      destdir="${test.build.classes}"
      debug="${javac.debug}"
      optimize="${javac.optimize}"
-     target="${javac.version}"
-     source="${javac.version}"
      deprecation="${javac.deprecation}"
      includeantruntime="false">
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
@@ -312,8 +310,6 @@
      destdir="${test.build.classes}"
      debug="${javac.debug}"
      optimize="${javac.optimize}"
-     target="${javac.version}"
-     source="${javac.version}"
      deprecation="${javac.deprecation}"
      includeantruntime="false">
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
@@ -331,6 +327,10 @@
         <fileset dir="${test.build.classes}" includes="**/udf/*.class"/>
         <fileset dir="${test.build.classes}" includes="**/udf/generic/*.class"/>
     </jar>
+    <delete file="${test.build.dir}/test-serdes.jar"/>
+    <jar jarfile="${test.build.dir}/test-serdes.jar">
+        <fileset dir="${test.build.classes}" includes="**/serde2/*.class"/>
+    </jar>  	
   </target>
 
   <target name="test-conditions">

Modified: hive/branches/vectorization/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/build.properties?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/build.properties (original)
+++ hive/branches/vectorization/build.properties Thu Sep 12 01:21:10 2013
@@ -16,12 +16,11 @@
 
 Name=Hive
 name=hive
-version=0.12.0-SNAPSHOT
+version=0.13.0-SNAPSHOT
 hcatalog.version=${version}
 year=2012
 
 javac.debug=on
-javac.version=1.6
 javac.optimize=on
 javac.deprecation=off
 javac.args=
@@ -29,7 +28,7 @@ javac.args.warnings=
 
 hadoop-0.20.version=0.20.2
 hadoop-0.20S.version=1.1.2
-hadoop-0.23.version=2.0.5-alpha
+hadoop-0.23.version=2.1.0-beta
 hadoop.version=${hadoop-0.20.version}
 hadoop.security.version=${hadoop-0.20S.version}
 # Used to determine which set of Hadoop artifacts we depend on.
@@ -147,6 +146,8 @@ mvn.pom.dir=${build.dir.hive}/maven/poms
 mvn.license.dir=${build.dir.hive}/maven/licenses
 mvn.deploy.id=apache.snapshots.https
 mvn.deploy.url=https://repository.apache.org/content/repositories/snapshots
+mvn.staging.repo.id=apache.staging.https
+mvn.staging.repo.url=https://repository.apache.org/service/local/staging/deploy/maven2
 
 #
 # Data nucleus repository - needed for jdo2-api-2.3-ec.jar download

Modified: hive/branches/vectorization/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/build.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/build.xml (original)
+++ hive/branches/vectorization/build.xml Thu Sep 12 01:21:10 2013
@@ -239,7 +239,7 @@
   -->
   <target name="ivy-init-antlib" depends="ivy-download,ivy-init-dirs,ivy-probe-antlib" unless="ivy.found">
     <echo message="Project: ${ant.project.name}"/>
-    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail" loaderRef="ivyLoader">
+    <typedef uri="antlib:org.apache.ivy.ant" onerror="fail" loaderRef="ivyLoader" resource="org/apache/ivy/ant/antlib.xml">
       <classpath>
         <pathelement location="${ivy.jar}"/>
       </classpath>
@@ -1135,8 +1135,6 @@
           todir="${mvn.jar.dir}" />
     <copy file="${build.dir.hive}/metastore/hive-metastore-${version}.jar"
           todir="${mvn.jar.dir}" />
-    <copy file="${build.dir.hive}/hcatalog/hive-hcatalog-${hcatalog.version}.jar"
-          todir="${mvn.jar.dir}" />
     <copy file="${build.dir.hive}/ql/hive-exec-${version}.jar"
           todir="${mvn.jar.dir}" />
     <copy file="${build.dir.hive}/serde/hive-serde-${version}.jar"
@@ -1165,9 +1163,7 @@
           tofile="${mvn.pom.dir}/hive-jdbc-${version}.pom" />
     <copy file="${build.dir.hive}/metastore/pom.xml"
           tofile="${mvn.pom.dir}/hive-metastore-${version}.pom" />
-    <copy file="${build.dir.hive}/hcatalog/pom.xml"
-          tofile="${mvn.pom.dir}/hive-hcatalog-${hcatalog.version}.pom" />
-        <copy file="${build.dir.hive}/ql/pom.xml"
+    <copy file="${build.dir.hive}/ql/pom.xml"
           tofile="${mvn.pom.dir}/hive-exec-${version}.pom" />
     <copy file="${build.dir.hive}/serde/pom.xml"
           tofile="${mvn.pom.dir}/hive-serde-${version}.pom" />
@@ -1217,8 +1213,7 @@
             file="${mvn.jar.dir}/hive-${hive.project}-${version}.jar">
           <pom refid="hive.project.pom" />
           <remoteRepository
-              id="apache.staging.https"
-              url="https://repository.apache.org/service/local/staging/deploy/maven2" />
+              id="${mvn.staging.repo.id}" url="${mvn.staging.repo.url}"/>
           <attach file="${mvn.jar.dir}/hive-${hive.project}-${version}.jar.asc"
                   type="jar.asc"/>
           <attach file="${mvn.pom.dir}/hive-${hive.project}-${version}.pom.asc"
@@ -1279,9 +1274,6 @@
       <param name="hive.project" value="metastore" />
     </antcall>
     <antcall target="maven-publish-artifact">
-      <param name="hive.project" value="hcatalog" />
-    </antcall>
-    <antcall target="maven-publish-artifact">
       <param name="hive.project" value="serde" />
     </antcall>
     <antcall target="maven-publish-artifact">
@@ -1290,6 +1282,28 @@
     <antcall target="maven-publish-artifact">
       <param name="hive.project" value="shims" />
     </antcall>
+    <!-- Handle HCat separately; matches maven-publish-artifact-->
+    <if>
+      <equals arg1="${mvn.publish.repo}" arg2="staging" />
+      <then>
+        <ant dir="hcatalog" target="mvn-deploy-signed">
+          <property name="mvn.deploy.repo.id" value="${mvn.staging.repo.id}"/>
+          <property name="mvn.deploy.repo.url" value="${mvn.staging.repo.url}"/>
+        </ant>
+      </then>
+      <elseif>
+        <equals arg1="${mvn.publish.repo}" arg2="local"/>
+          <then>
+            <!-- NOP, HCat always publishes to the local repo in jar target-->
+          </then>
+      </elseif>
+      <else>
+        <ant dir="hcatalog" target="mvn-deploy">
+          <property name="mvn.deploy.repo.id" value="${mvn.deploy.id}"/>
+          <property name="mvn.deploy.repo.url" value="${mvn.deploy.url}"/>
+        </ant>
+      </else>
+    </if>
   </target>
 
   <target name="maven-sign" if="staging">
@@ -1415,16 +1429,6 @@
         output.file="${mvn.jar.dir}/hive-metastore-${version}.pom.asc"
         gpg.passphrase="${gpg.passphrase}"/>
 
-    <!-- hive-hcatalog -->
-    <sign-artifact
-        input.file="${mvn.pom.dir}/hive-hcatalog-${hcatalog.version}.jar"
-        output.file="${mvn.pom.dir}/hive-hcatalog-${hcatalog.version}.jar.asc"
-        gpg.passphrase="${gpg.passphrase}"/>
-    <sign-artifact
-        input.file="${mvn.pom.dir}/hive-hcatalog-${hcatalog.version}.pom"
-        output.file="${mvn.pom.dir}/hive-hcatalog-${hcatalog.version}.pom.asc"
-        gpg.passphrase="${gpg.passphrase}"/>
-
     <!-- hive-serde -->
     <sign-artifact
         input.file="${mvn.jar.dir}/hive-serde-${version}.jar"

Modified: hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/common/ObjectPair.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/common/ObjectPair.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/common/ObjectPair.java (original)
+++ hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/common/ObjectPair.java Thu Sep 12 01:21:10 2013
@@ -26,6 +26,14 @@ public class ObjectPair<F, S> {
 
   public ObjectPair() {}
 
+  /**
+   * Creates a pair. Unlike the constructor, this method infers the generic
+   * type arguments from its parameters, so call sites are less verbose.
+   */
+  public static <T1, T2> ObjectPair<T1, T2> create(T1 f, T2 s) {
+    return new ObjectPair<T1, T2>(f, s);
+  }
+
   public ObjectPair(F first, S second) {
     this.first = first;
     this.second = second;
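
The point of the new create() factory is that, before Java 7's diamond
operator, constructors could not infer generic type arguments while static
methods could. A short usage sketch (getFirst/getSecond are ObjectPair's
existing accessors):

    import org.apache.hadoop.hive.common.ObjectPair;

    public class ObjectPairExample {
      public static void main(String[] args) {
        // Without the factory, both type arguments must be spelled out:
        ObjectPair<String, Integer> p1 =
            new ObjectPair<String, Integer>("rows", 42);

        // With the factory, the compiler infers them from the arguments:
        ObjectPair<String, Integer> p2 = ObjectPair.create("rows", 42);

        System.out.println(p1.getFirst() + "=" + p2.getSecond());
      }
    }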

Modified: hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/vectorization/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Sep 12 01:21:10 2013
@@ -177,6 +177,7 @@ public class HiveConf extends Configurat
     // QL execution stuff
     SCRIPTWRAPPER("hive.exec.script.wrapper", null),
     PLAN("hive.exec.plan", ""),
+    PLAN_SERIALIZATION("hive.plan.serialization.format","kryo"),
     SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive-" + System.getProperty("user.name")),
     LOCALSCRATCHDIR("hive.exec.local.scratchdir", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
     SUBMITVIACHILD("hive.exec.submitviachild", false),
@@ -505,9 +506,8 @@ public class HiveConf extends Configurat
 
     // Maximum fraction of heap that can be used by ORC file writers
     HIVE_ORC_FILE_MEMORY_POOL("hive.exec.orc.memory.pool", 0.5f), // 50%
-    // use 0.11 version of RLE encoding. if this conf is not defined or any
-    // other value specified, ORC will use the new RLE encoding
-    HIVE_ORC_WRITE_FORMAT("hive.exec.orc.write.format", "0.11"),
+    // Define the version of the file to write
+    HIVE_ORC_WRITE_FORMAT("hive.exec.orc.write.format", null),
 
     HIVE_ORC_DICTIONARY_KEY_SIZE_THRESHOLD("hive.exec.orc.dictionary.key.size.threshold", 0.8f),
 
@@ -525,6 +525,8 @@ public class HiveConf extends Configurat
     HIVELIMITOPTLIMITFILE("hive.limit.optimize.limit.file", 10),
     HIVELIMITOPTENABLE("hive.limit.optimize.enable", false),
     HIVELIMITOPTMAXFETCH("hive.limit.optimize.fetch.max", 50000),
+    HIVELIMITPUSHDOWNMEMORYUSAGE("hive.limit.pushdown.memory.usage", -1f),
+
     HIVEHASHTABLETHRESHOLD("hive.hashtable.initialCapacity", 100000),
     HIVEHASHTABLELOADFACTOR("hive.hashtable.loadfactor", (float) 0.75),
     HIVEHASHTABLEFOLLOWBYGBYMAXMEMORYUSAGE("hive.mapjoin.followby.gby.localtask.max.memory.usage", (float) 0.55),
@@ -660,6 +662,8 @@ public class HiveConf extends Configurat
     // 'minimal', 'more' (and 'all' later)
     HIVEFETCHTASKCONVERSION("hive.fetch.task.conversion", "minimal"),
 
+    HIVEFETCHTASKAGGR("hive.fetch.task.aggr", false),
+
     // Serde for FetchTask
     HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", "org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"),
 
@@ -732,7 +736,13 @@ public class HiveConf extends Configurat
     HIVE_ENTITY_SEPARATOR("hive.entity.separator", "@"),
 
     HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS("hive.server2.thrift.min.worker.threads", 5),
-    HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 100),
+    HIVE_SERVER2_THRIFT_MAX_WORKER_THREADS("hive.server2.thrift.max.worker.threads", 500),
+
+    // Configuration for async thread pool in SessionManager
+    // Number of async threads
+    HIVE_SERVER2_ASYNC_EXEC_THREADS("hive.server2.async.exec.threads", 50),
+    // Number of seconds HiveServer2 shutdown will wait for async threads to terminate
+    HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT("hive.server2.async.exec.shutdown.timeout", 10),
 
     HIVE_SERVER2_THRIFT_PORT("hive.server2.thrift.port", 10000),
     HIVE_SERVER2_THRIFT_BIND_HOST("hive.server2.thrift.bind.host", ""),
@@ -749,6 +759,7 @@ public class HiveConf extends Configurat
     HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS("hive.server2.custom.authentication.class", null),
     HIVE_SERVER2_ENABLE_DOAS("hive.server2.enable.doAs", true),
     HIVE_SERVER2_TABLE_TYPE_MAPPING("hive.server2.table.type.mapping", "HIVE"),
+    HIVE_SERVER2_SESSION_HOOK("hive.server2.session.hook", ""),
 
     HIVE_CONF_RESTRICTED_LIST("hive.conf.restricted.list", null),
 
@@ -790,12 +801,6 @@ public class HiveConf extends Configurat
     // Whether to show the unquoted partition names in query results.
     HIVE_DECODE_PARTITION_NAME("hive.decode.partition.name", false),
 
-    // ptf partition constants
-    HIVE_PTF_PARTITION_PERSISTENCE_CLASS("hive.ptf.partition.persistence",
-      "org.apache.hadoop.hive.ql.exec.PTFPersistence$PartitionedByteBasedList"),
-    HIVE_PTF_PARTITION_PERSISTENT_SIZE("hive.ptf.partition.persistence.memsize",
-      (int) Math.pow(2, (5 + 10 + 10)) ), // 32MB
-
     //Vectorization enabled
     HIVE_VECTORIZATION_ENABLED("hive.vectorized.execution.enabled", false),
     ;
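
The new ConfVars entries above (plan serialization format, limit pushdown
memory usage, the HiveServer2 async pool sizing) are read through HiveConf's
usual typed accessors. A brief sketch, assuming only the constants added in
this diff:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class ConfSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();

        // "kryo" unless hive.plan.serialization.format is overridden.
        String planFormat = conf.getVar(HiveConf.ConfVars.PLAN_SERIALIZATION);

        // -1f by default, i.e. top-K limit pushdown starts out disabled.
        float limitMem =
            conf.getFloatVar(HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE);

        // 50 async execution threads for HiveServer2 by default.
        int asyncThreads =
            conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_ASYNC_EXEC_THREADS);

        System.out.println(planFormat + " " + limitMem + " " + asyncThreads);
      }
    }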

Modified: hive/branches/vectorization/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/conf/hive-default.xml.template?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/conf/hive-default.xml.template (original)
+++ hive/branches/vectorization/conf/hive-default.xml.template Thu Sep 12 01:21:10 2013
@@ -431,19 +431,19 @@
 <property>
   <name>hive.mapjoin.followby.map.aggr.hash.percentmemory</name>
   <value>0.3</value>
-  <description>Portion of total memory to be used by map-side grup aggregation hash table, when this group by is followed by map join</description>
+  <description>Portion of total memory to be used by map-side group aggregation hash table, when this group by is followed by map join</description>
 </property>
 
 <property>
   <name>hive.map.aggr.hash.force.flush.memory.threshold</name>
   <value>0.9</value>
-  <description>The max memory to be used by map-side grup aggregation hash table, if the memory usage is higher than this number, force to flush data</description>
+  <description>The max memory to be used by map-side group aggregation hash table, if the memory usage is higher than this number, force to flush data</description>
 </property>
 
 <property>
   <name>hive.map.aggr.hash.percentmemory</name>
   <value>0.5</value>
-  <description>Portion of total memory to be used by map-side grup aggregation hash table</description>
+  <description>Portion of total memory to be used by map-side group aggregation hash table</description>
 </property>
 
 <property>
@@ -1584,6 +1584,12 @@
 </property>
 
 <property>
+  <name>hive.limit.pushdown.memory.usage</name>
+  <value>0.3f</value>
+  <description>The maximum memory, as a fraction, to be used by the hash in the ReduceSink (RS) operator for top-K selection.</description>
+</property>
+
+<property>
   <name>hive.rework.mapredwork</name>
   <value>false</value>
   <description>should rework the mapred work or not.
@@ -1848,11 +1854,24 @@
 
 <property>
   <name>hive.server2.thrift.max.worker.threads</name>
-  <value>100</value>
+  <value>500</value>
   <description>Maximum number of Thrift worker threads</description>
 </property>
 
 <property>
+  <name>hive.server2.async.exec.threads</name>
+  <value>50</value>
+  <description>Number of threads in the async thread pool for HiveServer2</description>
+</property>
+
+<property>
+  <name>hive.server2.async.exec.shutdown.timeout</name>
+  <value>10</value>
+  <description>Time (in seconds) for which HiveServer2 shutdown will wait for async 
+  threads to terminate</description>
+</property>
+
+<property>
   <name>hive.server2.thrift.port</name>
   <value>10000</value>
   <description>Port number of HiveServer2 Thrift interface.
@@ -1948,8 +1967,22 @@
 <property>
   <name>hive.server2.thrift.sasl.qop</name>
  <value>auth</value>
-  <description>Sasl QOP value; one of 'auth', 'auth-int' and 'auth-conf'</description>
+  <description>Sasl QOP value; set it to one of the following values to enable higher levels of
+     protection for HiveServer2 communication with clients.
+      "auth" - authentication only (default)
+      "auth-int" - authentication plus integrity protection
+      "auth-conf" - authentication plus integrity and confidentiality protection
+     This is applicable only when HiveServer2 is configured to use Kerberos authentication.
+ </description>
 </property>
 
-</configuration>
+<property>
+  <name>hive.plan.serialization.format</name>
+  <value>kryo</value>
+  <description>
+  Serialization format for the query plan exchanged between the client and task nodes.
+  The two supported values are kryo and javaXML; kryo is the default.
+  </description>
+</property>
 
+</configuration>
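
The stricter SASL QOP levels described above can also be set programmatically
on the client side; a minimal sketch (a plain Configuration-style set; the
property only takes effect when HiveServer2 uses Kerberos authentication):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class QopSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // Require integrity and confidentiality protection on the
        // Thrift transport.
        conf.set("hive.server2.thrift.sasl.qop", "auth-conf");
        System.out.println(conf.get("hive.server2.thrift.sasl.qop"));
      }
    }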

Modified: hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java (original)
+++ hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFCount2.java Thu Sep 12 01:21:10 2013
@@ -36,8 +36,8 @@ import org.apache.hadoop.hive.serde2.obj
  */
 public class GenericUDTFCount2 extends GenericUDTF {
 
-  Integer count = Integer.valueOf(0);
-  Object forwardObj[] = new Object[1];
+  private transient Integer count = Integer.valueOf(0);
+  private transient Object forwardObj[] = new Object[1];
 
   @Override
   public void close() throws HiveException {

Modified: hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java (original)
+++ hive/branches/vectorization/contrib/src/java/org/apache/hadoop/hive/contrib/udtf/example/GenericUDTFExplode2.java Thu Sep 12 01:21:10 2013
@@ -67,7 +67,7 @@ public class GenericUDTFExplode2 extends
         fieldOIs);
   }
 
-  Object forwardObj[] = new Object[2];
+  private transient Object forwardObj[] = new Object[2];
 
   @Override
   public void process(Object[] o) throws HiveException {
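
Marking these UDTF fields transient is presumably tied to the new kryo plan
serialization (hive.plan.serialization.format above): UDTF instances travel
inside the serialized query plan, and per-call scratch state should not be
serialized with them. A sketch of the pattern (ExampleUDTFState is a
hypothetical illustration, not code from this commit):

    import java.io.Serializable;

    public class ExampleUDTFState implements Serializable {
      // Configuration that should survive plan serialization
      // stays non-transient.
      private String separator = ",";

      // Scratch buffers are rebuilt at runtime, so they are
      // marked transient and skipped by the serializer.
      private transient Object[] forwardObj = new Object[1];
    }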

Modified: hive/branches/vectorization/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/eclipse-templates/.classpath?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/eclipse-templates/.classpath (original)
+++ hive/branches/vectorization/eclipse-templates/.classpath Thu Sep 12 01:21:10 2013
@@ -67,6 +67,7 @@
   <classpathentry kind="lib" path="build/ivy/lib/default/antlr-runtime-@antlr-runtime.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/junit-@junit.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/jdo-api-@jdo-api.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/kryo-@kryo.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-api-jdo-@datanucleus-api-jdo.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-core-@datanucleus-core.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-rdbms-@datanucleus-rdbms.version@.jar"/>

Modified: hive/branches/vectorization/hcatalog/bin/hcat
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/bin/hcat?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/bin/hcat (original)
+++ hive/branches/vectorization/hcatalog/bin/hcat Thu Sep 12 01:21:10 2013
@@ -151,12 +151,12 @@ export HADOOP_OPTS=$HADOOP_OPTS
 # run it
 if [ "$debug" == "true" ]; then
 	echo "Would run:"
-	echo "exec $HADOOP_PREFIX/bin/hadoop jar $HCAT_JAR org.apache.hcatalog.cli.HCatCli $remaining"
+	echo "exec $HADOOP_PREFIX/bin/hadoop jar $HCAT_JAR org.apache.hive.hcatalog.cli.HCatCli $remaining"
 	echo "with HADOOP_CLASSPATH set to ($HADOOP_CLASSPATH)"
 	echo "and HADOOP_OPTS set to ($HADOOP_OPTS)"
 elif [ "$dump_classpath" == "true" ]; then
     echo $HADOOP_CLASSPATH
 else
-	exec $HADOOP_PREFIX/bin/hadoop jar  $HCAT_JAR org.apache.hcatalog.cli.HCatCli "$@"
+	exec $HADOOP_PREFIX/bin/hadoop jar  $HCAT_JAR org.apache.hive.hcatalog.cli.HCatCli "$@"
 fi
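
This is part of the commit-wide org.apache.hcatalog -> org.apache.hive.hcatalog
package migration. Java callers of the CLI entry point move the same way; a
trivial sketch (HCatEntryPoint is a hypothetical wrapper):

    public class HCatEntryPoint {
      public static void main(String[] args) throws Exception {
        // The CLI main class moved with the package rename in this commit.
        org.apache.hive.hcatalog.cli.HCatCli.main(args);
      }
    }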
 

Modified: hive/branches/vectorization/hcatalog/build-support/ant/deploy.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build-support/ant/deploy.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build-support/ant/deploy.xml (original)
+++ hive/branches/vectorization/hcatalog/build-support/ant/deploy.xml Thu Sep 12 01:21:10 2013
@@ -69,7 +69,7 @@
       <_mvnpublish module="testutils" />
     </target>
 
-    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasts jar and deploy all Hive jars to local Maven repo">
+    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasks jar and deploy all Hive jars to local Maven repo">
         <echo message="${ant.project.name}"/>
         <get src="${mvnrepo}/org/apache/maven/maven-ant-tasks/${maven-ant-tasks.version}/maven-ant-tasks-${maven-ant-tasks.version}.jar"
              dest="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"
@@ -80,7 +80,7 @@
                  classpath="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"/>
         <antcall target="hive-mvn-publish" />
         <artifact:dependencies>
-            <dependency groupId="org.apache.hcatalog" artifactId="hcatalog" version="${hcatalog.version}" scope="system" systemPath="${path.to.basedir}/pom.xml"/>
+            <dependency groupId="org.apache.hive.hcatalog" artifactId="hcatalog" version="${hcatalog.version}" scope="system" systemPath="${path.to.basedir}/pom.xml"/>
             <artifact:localRepository path="${mvn.local.repo}"/>
         </artifact:dependencies>
         <artifact:pom id="mvn.pom" file="pom.xml">

Modified: hive/branches/vectorization/hcatalog/build-support/ant/test.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build-support/ant/test.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build-support/ant/test.xml (original)
+++ hive/branches/vectorization/hcatalog/build-support/ant/test.xml Thu Sep 12 01:21:10 2013
@@ -31,8 +31,10 @@
 
       <delete dir="${test.logs}"/>
       <delete dir="${test.warehouse.dir}"/>
+      <delete dir="${test.data.dir}"/>
       <mkdir dir="${test.logs}"/>
       <mkdir dir="${test.warehouse.dir}"/>
+      <mkdir dir="${test.data.dir}"/>
 
       <junit showoutput="${test.output}"
              printsummary="yes"
@@ -45,6 +47,7 @@
              failureProperty="tests.failed">
         <sysproperty key="hadoop.log.dir" value="${test.logs}"/>
         <sysproperty key="hive.metastore.warehouse.dir" value="${test.warehouse.dir}"/>
+        <sysproperty key="test.data.dir" value="${test.data.dir}"/>
         <sysproperty key="java.security.krb5.realm" value=""/> <!-- HADOOP-7489 -->
         <sysproperty key="java.security.krb5.kdc" value=""/> <!-- HADOOP-7489 -->
         <!--HCAT_PREFIX, HIVE_HOME are needed by WebHCat tests-->

Modified: hive/branches/vectorization/hcatalog/build-support/checkstyle/coding_style.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build-support/checkstyle/coding_style.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build-support/checkstyle/coding_style.xml (original)
+++ hive/branches/vectorization/hcatalog/build-support/checkstyle/coding_style.xml Thu Sep 12 01:21:10 2013
@@ -58,6 +58,7 @@ imposed on others.
 
         <module name="Indentation">
             <property name="caseIndent" value="0"/>
+            <property name="basicOffset" value="2"/>
         </module>
 
     </module>

Modified: hive/branches/vectorization/hcatalog/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build.properties?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build.properties (original)
+++ hive/branches/vectorization/hcatalog/build.properties Thu Sep 12 01:21:10 2013
@@ -15,7 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.
 
-hive.version=0.12.0-SNAPSHOT
+hive.version=0.13.0-SNAPSHOT
 hcatalog.version=${hive.version}
 jar.name=${ant.project.name}-${hcatalog.version}.jar
 hcatalog.jar=${ant.project.name}-${hcatalog.version}.jar
@@ -32,6 +32,7 @@ build.docs=${build.dir}/docs
 build.javadoc=${build.docs}/api
 dist.dir=${build.dir}/${final.name}
 
+test.data.dir=${build.dir}/data
 test.dir=${build.dir}/test
 test.classes=${test.dir}/classes
 test.logs=${test.dir}/logs
@@ -62,16 +63,9 @@ excludes=
 javac.debug=on
 javac.optimize=on
 javac.deprecation=off
-javac.version=1.6
 javac.args=
 javac.args.warnings=
 
-# hive properties
-#shims.name=20
-shims.20S.hive.shims.include=0.20,0.20S
-shims.20S.hadoop.version=${hive.hadoop-0.20S.version}
-shims.23.hive.shims.include=0.23
-shims.23.hadoop.version=${hive.hadoop-0.23.version}
 
 ###############################################################################
 # deploy properties

Modified: hive/branches/vectorization/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/build.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/build.xml (original)
+++ hive/branches/vectorization/hcatalog/build.xml Thu Sep 12 01:21:10 2013
@@ -186,7 +186,6 @@
         <ant target="clean" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean" dir="shims" inheritAll="false"/>
     </target>
 
     <!-- Clean up children -->
@@ -200,7 +199,6 @@
         <ant target="clean-test" dir="webhcat/svr" inheritAll="false"/>
         <ant target="clean-test" dir="webhcat/java-client" inheritAll="false"/>
         <ant target="clean-test" dir="storage-handlers/hbase" inheritAll="false"/>
-        <ant target="clean-test" dir="shims" inheritAll="false"/>
     </target>
 
     <!--
@@ -225,8 +223,10 @@
     <target name="javadoc" depends="jar" description="Generate Javadoc documentation" unless="skip.javadoc">
         <mkdir dir="${build.javadoc}"/>
         <record name="${build.dir}/javadoc.log" action="start"/>
+        <!--NOTE: the old org.apache.hcatalog packages are intentionally excluded,
+        since the 0.11 version of their JavaDocs is available online-->
         <javadoc overview="${src.dir}/../docs/overview.html"
-                 packagenames="org.apache.hcatalog.*"
+                 packagenames="org.apache.hive.hcatalog.*" 
                  destdir="${build.javadoc}"
                  author="true"
                  version="true"
@@ -244,17 +244,14 @@
             <packageset dir="webhcat/svr/src/main/java"/>
             <packageset dir="webhcat/java-client/src/main/java"/>
             <classpath>
-                <fileset dir="core/build/lib/test"/>
-                <fileset dir="hcatalog-pig-adapter/build/lib/compile"/>
-                <fileset dir="hcatalog-pig-adapter/build/lib/provided"/>
-                <fileset dir="server-extensions/build/lib/compile"/>
-                <fileset dir="server-extensions/build/lib/provided"/>
-                <fileset dir="storage-handlers/hbase/build/lib/compile"/>
-                <fileset dir="storage-handlers/hbase/build/lib/provided"/>
-                <fileset dir="webhcat/svr/build/lib/compile"/>
-                <fileset dir="webhcat/svr/build/lib/provided"/>
+                <fileset dir="core/build/lib/compile"/>
+                <fileset dir="hcatalog-pig-adapter/build/lib/compile" includes="pig*.jar"/>
+                <fileset dir="server-extensions/build/lib/compile" includes="jms*.jar"/>
+                <fileset dir="storage-handlers/hbase/build/lib/compile" includes="hbase*.jar,hive-hbase*.jar"/>
+                <fileset dir="webhcat/svr/build/lib/provided" includes="hcatalog-core*.jar"/>
+                <fileset dir="webhcat/svr/build/lib/compile" includes="jetty*.jar,jersey*.jar,commons-exec*.jar,jul-to-slf4j*.jar"/>
             </classpath>
-            <group title="hcatalog" packages="org.apache.hcatalog.*"/>
+            <group title="hcatalog" packages="org.apache.hive.hcatalog.*"/>
         </javadoc>
         <record name="${build.dir}/javadoc.log" action="stop"/>
         <condition property="javadoc.warnings">
@@ -364,6 +361,8 @@
         <copy todir="${dist.dir}/bin">
             <fileset dir="bin/">
                 <include name="hcat"/>
+                <include name="hcat.py"/>
+                <include name="hcatcfg.py"/>
             </fileset>
 
         </copy>
@@ -408,6 +407,8 @@
             </fileset>
             <fileset dir="bin">
                 <include name="hcat_server.sh"/>
+                <include name="hcat_server.py"/>
+                <include name="hcatcfg.py"/>
             </fileset>
         </copy>
 
@@ -430,21 +431,19 @@
             <property name="dist.handlers.dir" value="${handlers.dir}"/>
         </ant>
 
-        <!--
-        While ant has a native tar task, we call out to the command-line
-        tar command because it preserves file permissions of our carefully
-        constructed package dir.
-        -->
-        <!--
-        <exec executable="tar">
-            <arg value="-C"/>
-            <arg value="${build.dir}"/>
-            <arg value="-czf"/>
-            <arg value="${build.dir}/${final.name}.tar.gz"/>
-            <arg value="${final.name}"/>
-        </exec>
+        <tar destfile="${build.dir}/${final.name}.tar.gz" longfile="gnu" compression="gzip">
+            <tarfileset dir="${build.dir}" filemode="755">
+                <include name="${final.name}/bin/**"/>
+                <include name="${final.name}/sbin/**"/>
+            </tarfileset>
+            <tarfileset dir="${build.dir}">
+                <include name="${final.name}/**"/>
+                <exclude name="${final.name}/bin/**"/>
+                <exclude name="${final.name}/sbin/**"/>
+            </tarfileset>
+        </tar>
         <checksum file="${build.dir}/${final.name}.tar.gz" forceOverwrite="yes"/>
-        -->
+
     </target>
 
     <target name="releaseaudit" depends="init" description="generate a release audit report">
@@ -480,7 +479,6 @@
                 <include name="server-extensions/**"/>
                 <include name="webhcat/**"/>
                 <include name="license/**"/>
-                <include name="shims/**"/>
                 <include name="src/**"/>
                 <include name="storage-handlers/**"/>
                 <include name="*.properties"/>
@@ -517,5 +515,6 @@
     <import file="build-support/ant/deploy.xml"/>
     <import file="build-support/ant/findbugs.xml"/>
     <import file="build-support/ant/test.xml"/>
-
+    
+    <target name="make-pom" description="no-op in hcatalog, here to make hive's build work"/>
 </project>

Modified: hive/branches/vectorization/hcatalog/conf/proto-hive-site.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/conf/proto-hive-site.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/conf/proto-hive-site.xml (original)
+++ hive/branches/vectorization/hcatalog/conf/proto-hive-site.xml Thu Sep 12 01:21:10 2013
@@ -96,7 +96,7 @@
 
 <property>
   <name>hive.semantic.analyzer.factory.impl</name>
-  <value>org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory</value>
+  <value>org.apache.hive.hcatalog.cli.HCatSemanticAnalyzerFactory</value>
  <description>controls which SemanticAnalyzerFactory implementation class is used by CLI</description>
 </property>
 
@@ -114,7 +114,7 @@
 
 <property>
   <name>hive.security.authorization.manager</name>
-  <value>org.apache.hcatalog.security.StorageDelegationAuthorizationProvider</value>
+  <value>org.apache.hive.hcatalog.security.StorageDelegationAuthorizationProvider</value>
   <description>the hive client authorization manager class name.
   The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.
   HCatalog uses a model, where authorization checks are delegated to the storage layer (hdfs, hbase, ...).

Modified: hive/branches/vectorization/hcatalog/core/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/build.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/build.xml (original)
+++ hive/branches/vectorization/hcatalog/core/build.xml Thu Sep 12 01:21:10 2013
@@ -39,15 +39,4 @@
   <path id="findbugs.class.path">
     <fileset dir="${build.dir}/lib/compile"/>
   </path>
-
-  <target name="compile">
-    <echo message="${ant.project.name}"/>
-    <_javac srcDir="${basedir}/src/main/java"
-            destDir="${build.classes}"
-            classPathRef="compile.class.path"/>
-    <ant target="jar" dir="${path.to.basedir}/shims" inheritAll="false">
-        <property name="_mvn.hadoop.profile" value="${mvn.hadoop.profile}"/>
-    </ant>
-  </target>
-
 </project>

Modified: hive/branches/vectorization/hcatalog/core/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/pom.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/core/pom.xml (original)
+++ hive/branches/vectorization/hcatalog/core/pom.xml Thu Sep 12 01:21:10 2013
@@ -22,14 +22,13 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 
     <parent>
-        <groupId>org.apache.hcatalog</groupId>
+        <groupId>org.apache.hive.hcatalog</groupId>
         <artifactId>hcatalog</artifactId>
-        <version>0.12.0-SNAPSHOT</version>
+        <version>0.13.0-SNAPSHOT</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.hcatalog</groupId>
     <artifactId>hcatalog-core</artifactId>
     <packaging>jar</packaging>
     <name>hcatalog-core</name>

Copied: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java (from r1520709, hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java)
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java?p2=hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java&p1=hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java&r1=1520709&r2=1522098&rev=1522098&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hcatalog/common/HiveClientCache.java Thu Sep 12 01:21:10 2013
@@ -27,7 +27,6 @@ import org.apache.commons.lang.builder.H
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
@@ -274,11 +273,7 @@ class HiveClientCache {
     protected boolean isOpen() {
       try {
         // Look for an unlikely database name and see if either MetaException or TException is thrown
-        this.getDatabase("NonExistentDatabaseUsedForHealthCheck");
-      } catch (NoSuchObjectException e) {
-        return true; // It is okay if the database doesn't exist
-      } catch (MetaException e) {
-        return false;
+        this.getDatabases("NonExistentDatabaseUsedForHealthCheck");
       } catch (TException e) {
         return false;
       }

Copied: hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java (from r1520709, hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java)
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java?p2=hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java&p1=hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java&r1=1520709&r2=1522098&rev=1522098&view=diff
==============================================================================
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java (original)
+++ hive/branches/vectorization/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java Thu Sep 12 01:21:10 2013
@@ -27,7 +27,6 @@ import org.apache.commons.lang.builder.H
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
@@ -273,11 +272,7 @@ class HiveClientCache {
     protected boolean isOpen() {
       try {
         // Look for an unlikely database name and see if either MetaException or TException is thrown
-        this.getDatabase("NonExistentDatabaseUsedForHealthCheck");
-      } catch (NoSuchObjectException e) {
-        return true; // It is okay if the database doesn't exist
-      } catch (MetaException e) {
-        return false;
+        this.getDatabases("NonExistentDatabaseUsedForHealthCheck");
       } catch (TException e) {
         return false;
       }
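
The switch from getDatabase() to getDatabases() simplifies the health check:
getDatabases() takes a name pattern and returns an (empty) list for a
non-existent name instead of throwing NoSuchObjectException, so any TException
now signals a broken connection. The resulting method, reconstructed from the
hunk above (the trailing return true is inferred from context):

    protected boolean isOpen() {
      try {
        // A non-existent pattern simply yields an empty list, so only
        // transport/metastore failures can throw here.
        this.getDatabases("NonExistentDatabaseUsedForHealthCheck");
      } catch (TException e) {
        return false; // Connection is unusable; drop it from the cache.
      }
      return true;
    }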

Modified: hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/pom.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/pom.xml (original)
+++ hive/branches/vectorization/hcatalog/hcatalog-pig-adapter/pom.xml Thu Sep 12 01:21:10 2013
@@ -22,14 +22,13 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 
     <parent>
-        <groupId>org.apache.hcatalog</groupId>
+        <groupId>org.apache.hive.hcatalog</groupId>
         <artifactId>hcatalog</artifactId>
-        <version>0.12.0-SNAPSHOT</version>
+        <version>0.13.0-SNAPSHOT</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.hcatalog</groupId>
     <artifactId>hcatalog-pig-adapter</artifactId>
     <packaging>jar</packaging>
     <name>hcatalog-pig-adapter</name>
@@ -37,7 +36,7 @@
 
     <dependencies>
         <dependency>
-            <groupId>org.apache.hcatalog</groupId>
+            <groupId>org.apache.hive.hcatalog</groupId>
             <artifactId>hcatalog-core</artifactId>
             <version>${hcatalog.version}</version>
             <scope>compile</scope>

Modified: hive/branches/vectorization/hcatalog/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/pom.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/pom.xml (original)
+++ hive/branches/vectorization/hcatalog/pom.xml Thu Sep 12 01:21:10 2013
@@ -27,7 +27,7 @@
       <commons-io.version>2.4</commons-io.version>
       <guava.version>11.0.2</guava.version>
       <hadoop20.version>1.0.3</hadoop20.version>
-      <hadoop23.version>2.0.3-alpha</hadoop23.version>
+      <hadoop23.version>2.1.0-beta</hadoop23.version>
       <hbase.version>0.94.5</hbase.version>
       <hcatalog.version>${project.version}</hcatalog.version>
       <hive.version>${project.version}</hive.version>
@@ -43,9 +43,9 @@
   </properties>
 
   <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hcatalog</groupId>
+  <groupId>org.apache.hive.hcatalog</groupId>
   <artifactId>hcatalog</artifactId>
-  <version>0.12.0-SNAPSHOT</version>
+  <version>0.13.0-SNAPSHOT</version>
   <packaging>pom</packaging>
 
   <build>
@@ -215,7 +215,16 @@
         <enabled>false</enabled>
       </snapshots>
     </repository>
-
+    <repository>
+      <id>sonatype-snapshots</id>
+      <url>https://oss.sonatype.org/content/repositories/snapshots/</url>
+      <releases>
+        <enabled>false</enabled>
+      </releases>
+      <snapshots>
+        <enabled>true</enabled>
+      </snapshots>
+    </repository>
   </repositories>
 
     <dependencies>

Modified: hive/branches/vectorization/hcatalog/server-extensions/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/server-extensions/pom.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/server-extensions/pom.xml (original)
+++ hive/branches/vectorization/hcatalog/server-extensions/pom.xml Thu Sep 12 01:21:10 2013
@@ -22,14 +22,13 @@
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
 
     <parent>
-        <groupId>org.apache.hcatalog</groupId>
+        <groupId>org.apache.hive.hcatalog</groupId>
         <artifactId>hcatalog</artifactId>
-        <version>0.12.0-SNAPSHOT</version>
+        <version>0.13.0-SNAPSHOT</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
-    <groupId>org.apache.hcatalog</groupId>
     <artifactId>hcatalog-server-extensions</artifactId>
     <packaging>jar</packaging>
     <name>server-extensions</name>
@@ -61,7 +60,7 @@
             <scope>compile</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.hcatalog</groupId>
+            <groupId>org.apache.hive.hcatalog</groupId>
             <artifactId>hcatalog-core</artifactId>
             <version>${hcatalog.version}</version>
             <scope>compile</scope>

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/authorization.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/authorization.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/authorization.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/authorization.xml Thu Sep 12 01:21:10 2013
@@ -185,7 +185,7 @@
 
   &lt;property&gt;
     &lt;name&gt;hive.security.authorization.manager&lt;/name&gt;
-    &lt;value&gt;org.apache.hcatalog.security.HdfsAuthorizationProvider&lt;/value&gt;
+    &lt;value&gt;org.apache.hive.hcatalog.security.HdfsAuthorizationProvider&lt;/value&gt;
     &lt;description&gt;the hive client authorization manager class name.
     The user defined authorization class should implement interface
     org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml Thu Sep 12 01:21:10 2013
@@ -228,7 +228,7 @@ Using localhost in metastore uri does no
 
   <tr>
     <td><strong>templeton.storage.class</strong></td>
-    <td><code>org.apache.hcatalog.templeton.tool.ZooKeeperStorage</code></td>
+    <td><code>org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage</code></td>
     <td>The class to use as storage</td>
   </tr>
 
@@ -286,6 +286,46 @@ Using localhost in metastore uri does no
         principal.</td>
   </tr>
 
+   <tr>
+     <td><strong>webhcat.proxyuser.#USER#.hosts</strong></td>
+     <td>None</td>
+     <td>List of client hosts from which the '#USER#' user is allowed to perform
+         'doAs' operations.
+
+         The '#USER#' must be replaced with the username of the user who is
+         allowed to perform 'doAs' operations.
+
+         The value can be the '*' wildcard, which means every host is allowed,
+         or a comma-separated list of hostnames.
+
+         If the value is a blank string or webhcat.proxyuser.#USER#.hosts is missing,
+         no hosts will be allowed.
+
+         For multiple users copy this property and replace the user name
+         in the property name.</td>
+   </tr>
+   <tr>
+     <td><strong>webhcat.proxyuser.#USER#.groups</strong></td>
+     <td>None</td>
+     <td>List of groups the '#USER#' user is allowed to impersonate users
+         from to perform 'doAs' operations.
+
+         The '#USER#' must be replaced with the username of the user who is
+         allowed to perform 'doAs' operations.
+
+         The value can be the '*' wildcard, which means any doAs value is
+         allowed, or a comma-separated list of groups.
+
+         If the value is an empty list or webhcat.proxyuser.#USER#.groups is missing,
+         every doAs call will fail.
+
+         For multiple users copy this property and replace the user name
+         in the property name.
+
+         The username->usergroup mapping is performed using the Hadoop API, which is
+         controlled by the hadoop.security.group.mapping property.</td>
+    </tr>
+
   </table>
   </section>
 

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/install.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/install.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/install.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/install.xml Thu Sep 12 01:21:10 2013
@@ -186,7 +186,7 @@
         </tr>
         <tr>
             <td>hive.semantic.analyzer.factory.impl</td>
-            <td>org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory</td>
+            <td>org.apache.hive.hcatalog.cli.HCatSemanticAnalyzerFactory</td>
         </tr>
         <tr>
             <td>hive.metastore.warehouse.dir</td>

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listproperties.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listproperties.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listproperties.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listproperties.xml Thu Sep 12 01:21:10 2013
@@ -86,10 +86,10 @@
  "properties": {
    "fruit": "apple",
    "last_modified_by": "ctdean",
-   "hcat.osd": "org.apache.hcatalog.rcfile.RCFileOutputDriver",
+   "hcat.osd": "org.apache.hive.hcatalog.rcfile.RCFileOutputDriver",
    "color": "blue",
    "last_modified_time": "1331620706",
-   "hcat.isd": "org.apache.hcatalog.rcfile.RCFileInputDriver",
+   "hcat.isd": "org.apache.hive.hcatalog.rcfile.RCFileInputDriver",
    "transient_lastDdlTime": "1331620706",
    "comment": "Best table made today",
    "country": "Albania"

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listtables.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listtables.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listtables.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/listtables.xml Thu Sep 12 01:21:10 2013
@@ -100,10 +100,10 @@
         at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1332)
         at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1123)
         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:931)
-        at org.apache.hcatalog.cli.HCatDriver.run(HCatDriver.java:42)
-        at org.apache.hcatalog.cli.HCatCli.processCmd(HCatCli.java:247)
-        at org.apache.hcatalog.cli.HCatCli.processLine(HCatCli.java:203)
-        at org.apache.hcatalog.cli.HCatCli.main(HCatCli.java:162)
+        at org.apache.hive.hcatalog.cli.HCatDriver.run(HCatDriver.java:42)
+        at org.apache.hive.hcatalog.cli.HCatCli.processCmd(HCatCli.java:247)
+        at org.apache.hive.hcatalog.cli.HCatCli.processLine(HCatCli.java:203)
+        at org.apache.hive.hcatalog.cli.HCatCli.main(HCatCli.java:162)
         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)

Modified: hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/loadstore.xml
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/loadstore.xml?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/loadstore.xml (original)
+++ hive/branches/vectorization/hcatalog/src/docs/src/documentation/content/xdocs/loadstore.xml Thu Sep 12 01:21:10 2013
@@ -44,7 +44,7 @@ required for these interfaces.</p>
 
 <p>HCatLoader is accessed via a Pig load statement.</p>
 <source>
-A = LOAD 'tablename' USING org.apache.hcatalog.pig.HCatLoader(); 
+A = LOAD 'tablename' USING org.apache.hive.hcatalog.pig.HCatLoader(); 
 </source>
 
 <p><strong>Assumptions</strong></p>
@@ -285,7 +285,7 @@ and to be able to authenticate to the HC
 <p>This load statement will load all partitions of the specified table.</p>
 <source>
 /* myscript.pig */
-A = LOAD 'tablename' USING org.apache.hcatalog.pig.HCatLoader(); 
+A = LOAD 'tablename' USING org.apache.hive.hcatalog.pig.HCatLoader(); 
 ...
 ...
 </source>
@@ -296,7 +296,7 @@ might not immediately follow its load st
 include conditions on partition as well as non-partition columns.</p>
 <source>
 /* myscript.pig */
-A = LOAD 'tablename' USING  org.apache.hcatalog.pig.HCatLoader();
+A = LOAD 'tablename' USING  org.apache.hive.hcatalog.pig.HCatLoader();
 
 -- date is a partition column; age is not
 B = filter A by date == '20100819' and age &lt; 30; 
@@ -310,7 +310,7 @@ C = filter A by date == '20100819' and c
 <p>To scan a whole table, for example:</p>
 
 <source>
-a = load 'student_data' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'student_data' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = foreach a generate name, age;
 </source>
 
@@ -321,7 +321,7 @@ declare name and age as fields, as if yo
 datestamp, for example:</p>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp == '20110924';
 </source>
 
@@ -329,7 +329,7 @@ b = filter a by datestamp == '20110924';
 datestamp = '20110924'. You can combine this filter with others via 'and':</p>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp == '20110924' and user is not null;
 </source>
 
@@ -345,26 +345,26 @@ and '&gt;='.</p>
 <p>For example:</p>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp &gt; '20110924';
 </source>
 
 <p>A complex filter can have various combinations of operators, such as:</p>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp == '20110924' or datestamp == '20110925';
 </source>
 
 <p>These two examples have the same effect:</p>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp &gt;= '20110924' and datestamp &lt;= '20110925';
 </source>
 
 <source>
-a = load 'web_logs' using org.apache.hcatalog.pig.HCatLoader();
+a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();
 b = filter a by datestamp &lt;= '20110925' and datestamp &gt;= '20110924';
 </source>
 
@@ -391,7 +391,7 @@ B = FOREACH A ...
 my_processed_data = ...
 
 STORE my_processed_data INTO 'tablename'
-   USING org.apache.hcatalog.pig.HCatStorer();
+   USING org.apache.hive.hcatalog.pig.HCatStorer();
 </source>
 
 <p><strong>Assumptions</strong></p>
@@ -414,17 +414,17 @@ should <em>NOT</em> be quoted.</p>
 
 <p>You can write to a non-partitioned table simply by using HCatStorer.  The contents of the table will be overwritten:</p>
 
-<source>store z into 'web_data' using org.apache.hcatalog.pig.HCatStorer();</source>
+<source>store z into 'web_data' using org.apache.hive.hcatalog.pig.HCatStorer();</source>
 
 <p>To add one new partition to a partitioned table, specify the partition value in the store function. Pay careful
 attention to the quoting, as the whole string must be single quoted and separated with an equals sign:</p>
 
-<source>store z into 'web_data' using org.apache.hcatalog.pig.HCatStorer('datestamp=20110924');</source>
+<source>store z into 'web_data' using org.apache.hive.hcatalog.pig.HCatStorer('datestamp=20110924');</source>
 
 <p>To write into multiple partitions at once, make sure that the partition column is present in your data, then call
 HCatStorer with no argument:</p>
 
-<source>store z into 'web_data' using org.apache.hcatalog.pig.HCatStorer(); 
+<source>store z into 'web_data' using org.apache.hive.hcatalog.pig.HCatStorer(); 
   -- datestamp must be a field in the relation z</source>
 
 </section>

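The loadstore.xml edits above change only the package prefix in the documented Pig statements. For code that drives those same statements from Java via embedded Pig, the identical one-line rename applies; below is a minimal sketch, assuming a local exec type, a hypothetical web_logs table, and a metastore reachable through hive-site.xml (none of which come from this patch):

    import java.util.Iterator;

    import org.apache.pig.ExecType;
    import org.apache.pig.PigServer;
    import org.apache.pig.data.Tuple;

    // Hypothetical smoke test: exercises the renamed HCatLoader from embedded Pig.
    public class HCatLoaderSmoke {
      public static void main(String[] args) throws Exception {
        PigServer pig = new PigServer(ExecType.LOCAL);
        // Same statements as in the docs above, with the new package prefix.
        pig.registerQuery("a = load 'web_logs' using org.apache.hive.hcatalog.pig.HCatLoader();");
        pig.registerQuery("b = filter a by datestamp == '20110924';");
        Iterator<Tuple> rows = pig.openIterator("b");
        while (rows.hasNext()) {
          System.out.println(rows.next());
        }
      }
    }

Running it prints each matching tuple; the registered queries are the load and filter statements from the documentation above, so only the org.apache.hive.hcatalog prefix is new.
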
Modified: hive/branches/vectorization/hcatalog/src/packages/templates/conf/hive-site.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/packages/templates/conf/hive-site.xml.template?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/packages/templates/conf/hive-site.xml.template (original)
+++ hive/branches/vectorization/hcatalog/src/packages/templates/conf/hive-site.xml.template Thu Sep 12 01:21:10 2013
@@ -87,7 +87,7 @@
 
 <property>
   <name>hive.semantic.analyzer.factory.impl</name>
-  <value>org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory</value>
+  <value>org.apache.hive.hcatalog.cli.HCatSemanticAnalyzerFactory</value>
  <description>controls which SemanticAnalyzerFactory implementation class is used by CLI</description>
 </property>
 

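The template change above has a programmatic counterpart for deployments that assemble a HiveConf in code instead of reading hive-site.xml. A hedged sketch follows; the class and method names are illustrative only, while the property name and value are the ones from the template:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class SemanticAnalyzerConfig {
      // Assumed programmatic equivalent of the template change above.
      public static HiveConf withHCatAnalyzer() {
        HiveConf conf = new HiveConf();
        conf.set("hive.semantic.analyzer.factory.impl",
            "org.apache.hive.hcatalog.cli.HCatSemanticAnalyzerFactory");
        return conf;
      }
    }
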
Modified: hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm Thu Sep 12 01:21:10 2013
@@ -145,7 +145,7 @@ sub runTest
 	               $testCmd->{'group'} .  "_" .  $testCmd->{'num'} . ".$i.out";
                    $tableName = $results[$i];
 	           $modifiedTestCmd{'num'} = $testCmd->{'num'} . "_" . $i . "_benchmark";
-                   $modifiedTestCmd{'pig'} = "a = load '$tableName' using org.apache.hcatalog.pig.HCatLoader(); store a into ':OUTPATH:';";
+                   $modifiedTestCmd{'pig'} = "a = load '$tableName' using org.apache.hive.hcatalog.pig.HCatLoader(); store a into ':OUTPATH:';";
                    my $r = $self->runPig(\%modifiedTestCmd, $log, 1);
 	           $outputs[$i] = $r->{'output'};
                } else {
@@ -185,7 +185,7 @@ sub dumpPigTable
     my $outfile = $testCmd->{'outpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'}  . $id . "dump.out";
 
     open(FH, "> $pigfile") or die "Unable to open file $pigfile to write pig script, $ERRNO\n";
-    print FH "a = load '$table' using org.apache.hcatalog.pig.HCatLoader(); store a into '$outfile';\n";
+    print FH "a = load '$table' using org.apache.hive.hcatalog.pig.HCatLoader(); store a into '$outfile';\n";
     close(FH);
 
 

Modified: hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverPig.pm
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverPig.pm?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverPig.pm (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/drivers/TestDriverPig.pm Thu Sep 12 01:21:10 2013
@@ -184,7 +184,7 @@ sub runTest
 	               $testCmd->{'group'} .  "_" .  $testCmd->{'num'} . ".$i.out";
                    $tableName = $results[$i];
 	           $modifiedTestCmd{'num'} = $testCmd->{'num'} . "_" . $i . "_benchmark";
-                   $modifiedTestCmd{'pig'} = "a = load '$tableName' using org.apache.hcatalog.pig.HCatLoader(); store a into ':OUTPATH:';";
+                   $modifiedTestCmd{'pig'} = "a = load '$tableName' using org.apache.hive.hcatalog.pig.HCatLoader(); store a into ':OUTPATH:';";
                    my $r = $self->runPig(\%modifiedTestCmd, $log, 1, 1);
 	           $outputs[$i] = $r->{'output'};
                } else {

Modified: hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hadoop.conf Thu Sep 12 01:21:10 2013
@@ -40,7 +40,7 @@ $cfg = {
 				{
 				 'num' => 1
 				,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.SimpleRead -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.SimpleRead -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k :OUTPATH:
 \,
                                 ,'sql' => q\select name, age from studenttab10k;\
                                 ,'floatpostprocess' => 1
@@ -51,7 +51,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 ,'hcat_prep'=>q\drop table if exists hadoop_checkin_2;
 create table hadoop_checkin_2 (name string, age int, gpa double) STORED AS TEXTFILE;\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.ReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k hadoop_checkin_2
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.ReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k hadoop_checkin_2
 \,
                                 ,'result_table' => 'hadoop_checkin_2'
                                 ,'sql' => q\select * from studenttab10k;\
@@ -63,7 +63,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 ,'hcat_prep'=>q\drop table if exists hadoop_checkin_3;
 create table hadoop_checkin_3 (age int, cnt int) STORED AS TEXTFILE;\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.GroupByAge -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k hadoop_checkin_3
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.GroupByAge -libjars :HCAT_JAR: :THRIFTSERVER: studenttab10k hadoop_checkin_3
 \,
                                 ,'result_table' => 'hadoop_checkin_3'
                                 ,'sql' => q\select age, count(*) from studenttab10k group by age;\
@@ -71,7 +71,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                  # Read from a partitioned table
                                  'num' => 4
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.SimpleRead -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.SimpleRead -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k :OUTPATH:
 \,
                                 ,'sql' => q\select name, age from studentparttab30k;\
                                 ,'floatpostprocess' => 1
@@ -82,7 +82,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 ,'hcat_prep'=>q\drop table if exists hadoop_checkin_5;
 create table hadoop_checkin_5 (name string, age int) partitioned by (ds string) STORED AS TEXTFILE;\
                                 ,'hadoop' => q?
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteTextPartitioned -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k hadoop_checkin_5 ds=\"20110924\"
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteTextPartitioned -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k hadoop_checkin_5 ds=\"20110924\"
 ?,
                                 ,'result_table' => 'hadoop_checkin_5'
                                 ,'sql' => q\select name, age, ds from studentparttab30k where ds='20110924';\
@@ -94,7 +94,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 ,'hcat_prep'=>q\drop table if exists hadoop_checkin_6;
 create table hadoop_checkin_6 (name string, age int) partitioned by (ds string) STORED AS TEXTFILE;\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteTextPartitioned -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k hadoop_checkin_6
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteTextPartitioned -libjars :HCAT_JAR: :THRIFTSERVER: studentparttab30k hadoop_checkin_6
 \,
                                 ,'result_table' => 'hadoop_checkin_6'
                                 ,'sql' => q\select name, age, ds from studentparttab30k;\
@@ -109,7 +109,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 {
                                  'num' => 1
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.ReadText -libjars :HCAT_JAR: :THRIFTSERVER: all100k :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.ReadText -libjars :HCAT_JAR: :THRIFTSERVER: all100k :OUTPATH:
 \,
                                 ,'sql' => q\select * from all100k;\
                                 ,'floatpostprocess' => 1
@@ -118,7 +118,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 {
                                  'num' => 2
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.ReadJson -libjars :HCAT_JAR: :THRIFTSERVER: all100kjson :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.ReadJson -libjars :HCAT_JAR: :THRIFTSERVER: all100kjson :OUTPATH:
 \,
                                 ,'sql' => q\select s, i, d from all100kjson;\
                                 ,'floatpostprocess' => 1
@@ -127,7 +127,7 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 {
                                  'num' => 3
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.ReadRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.ReadRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc :OUTPATH:
 \,
                                 ,'sql' => q\select name, age, floor(gpa) + 0.1 from all100krc;\
                                 ,'floatpostprocess' => 1
@@ -155,7 +155,7 @@ create table hadoop_write_1(
         fields terminated by ':'
         stored as textfile;\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteText -libjars :HCAT_JAR: :THRIFTSERVER: all100k hadoop_write_1
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteText -libjars :HCAT_JAR: :THRIFTSERVER: all100k hadoop_write_1
 \,
                                 ,'result_table' => 'hadoop_write_1'
                                 ,'sql' => q\select * from all100k;\
@@ -172,10 +172,10 @@ create table hadoop_write_2(
             d double,
             m map<string, string>,
             bb array<struct<a: int, b: string>>)
-            row format serde 'org.apache.hcatalog.data.JsonSerDe'
+            row format serde 'org.apache.hive.hcatalog.data.JsonSerDe'
             stored as textfile;\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteJson -libjars :HCAT_JAR: :THRIFTSERVER: all100kjson hadoop_write_2
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteJson -libjars :HCAT_JAR: :THRIFTSERVER: all100kjson hadoop_write_2
 \,
                                 ,'result_table' => 'hadoop_write_2'
                                 ,'sql' => q\select s, i, d, '', '' from all100kjson;\
@@ -193,7 +193,7 @@ create table hadoop_write_3(
 stored as rcfile;
 \,
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc hadoop_write_3
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc hadoop_write_3
 \,
                                 ,'result_table' => 'hadoop_write_3'
                                 ,'sql' => q\select name, age, floor(gpa) + 0.1 from all100krc;\
@@ -210,7 +210,7 @@ create table hadoop_write_4(
 stored as sequencefile;
 \,
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc hadoop_write_4
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.WriteRC -libjars :HCAT_JAR: :THRIFTSERVER: all100krc hadoop_write_4
 \,
                                 ,'result_table' => 'hadoop_write_4'
                                 ,'sql' => q\select name, age, floor(gpa) + 0.1 from all100krc;\
@@ -225,9 +225,9 @@ jar :FUNCPATH:/testudf.jar org.apache.hc
                                 {
                                  'num' => 1
                                 ,'hcat_prep'=>q\drop table if exists hadoop_hbase_1;
-create table hadoop_hbase_1(key string, gpa string) STORED BY 'org.apache.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:gpa');\
+create table hadoop_hbase_1(key string, gpa string) STORED BY 'org.apache.hive.hcatalog.hbase.HBaseHCatStorageHandler' TBLPROPERTIES ('hbase.columns.mapping'=':key,info:gpa');\
                                 ,'hadoop' => q\
-jar :FUNCPATH:/testudf.jar org.apache.hcatalog.utils.HBaseReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:
+jar :FUNCPATH:/testudf.jar org.apache.hive.hcatalog.utils.HBaseReadWrite -libjars :HCAT_JAR: :THRIFTSERVER: :INPATH:/studenttab10k hadoop_hbase_1 :OUTPATH:
 \,
                                 ,'sql' => q\select name, sum(gpa) from studenttab10k group by name;\
                                 ,'floatpostprocess' => 1

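The hadoop.conf entries above invoke driver classes such as SimpleRead and ReadWrite out of testudf.jar, whose source is not part of this hunk. For orientation, here is a minimal sketch of a read job of that shape, assuming the Hadoop 2 Job API, the 'default' database, and name/age as the first two table columns (all assumptions, not taken from this patch):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.WritableComparable;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;
    import org.apache.hive.hcatalog.data.HCatRecord;
    import org.apache.hive.hcatalog.mapreduce.HCatInputFormat;

    // Hypothetical stand-in for a SimpleRead-style driver: reads two columns
    // of an HCatalog table and writes them out as tab-separated text.
    public class SimpleReadSketch extends Configured implements Tool {

      public static class ReadMapper
          extends Mapper<WritableComparable, HCatRecord, NullWritable, Text> {
        @Override
        protected void map(WritableComparable key, HCatRecord value, Context ctx)
            throws IOException, InterruptedException {
          // Fields 0 and 1 are assumed to be name and age, as in studenttab10k.
          ctx.write(NullWritable.get(), new Text(value.get(0) + "\t" + value.get(1)));
        }
      }

      @Override
      public int run(String[] args) throws Exception {
        Job job = Job.getInstance(getConf(), "simple-read-sketch");
        job.setJarByClass(SimpleReadSketch.class);
        // Table metadata comes from the metastore configured in hive-site.xml.
        HCatInputFormat.setInput(job, "default", args[0]);
        job.setInputFormatClass(HCatInputFormat.class);
        job.setMapperClass(ReadMapper.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setNumReduceTasks(0);
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job.waitForCompletion(true) ? 0 : 1;
      }

      public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new SimpleReadSketch(), args));
      }
    }

Invoked as 'hadoop jar sketch.jar SimpleReadSketch studenttab10k /out/path', it mirrors the shape of the ':FUNCPATH:/testudf.jar ... studenttab10k :OUTPATH:' command lines in the test entries above.
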
Modified: hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hive.conf
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hive.conf?rev=1522098&r1=1522097&r2=1522098&view=diff
==============================================================================
--- hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hive.conf (original)
+++ hive/branches/vectorization/hcatalog/src/test/e2e/hcatalog/tests/hive.conf Thu Sep 12 01:21:10 2013
@@ -167,7 +167,7 @@ insert into TABLE hive_write_1 select t,
                         'num' => 2,
                          'sql' => q\
 drop table if exists hive_write_2;
-create table hive_write_2 (name string, age int, gpa double) row format serde 'org.apache.hcatalog.data.JsonSerDe' stored as textfile;
+create table hive_write_2 (name string, age int, gpa double) row format serde 'org.apache.hive.hcatalog.data.JsonSerDe' stored as textfile;
 insert into TABLE hive_write_2 select s, i, 0.1 from all100kjson;\,
                          'result_table' => 'hive_write_2',
                          'verify_sql' =>"select s, i, 0.1 from all100kjson;",