You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2013/07/29 23:08:19 UTC

svn commit: r1508202 [2/48] - in /hive/branches/tez: ./ beeline/src/java/org/apache/hive/beeline/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/metrics/ common/src/java/org/apache/hadoop/hive/conf/ common/src/te...

Propchange: hive/branches/tez/
------------------------------------------------------------------------------
  Merged /hive/branches/branch-0.11:r1505184
  Merged /hive/trunk:r1494760-1508198

Modified: hive/branches/tez/.gitignore
URL: http://svn.apache.org/viewvc/hive/branches/tez/.gitignore?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/.gitignore (original)
+++ hive/branches/tez/.gitignore Mon Jul 29 21:08:03 2013
@@ -16,4 +16,4 @@ common/src/gen
 ql/derby.log
 derby.log
 .arc
-
+ql/TempStatsStore

Modified: hive/branches/tez/RELEASE_NOTES.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/RELEASE_NOTES.txt?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/RELEASE_NOTES.txt (original)
+++ hive/branches/tez/RELEASE_NOTES.txt Mon Jul 29 21:08:03 2013
@@ -15,6 +15,7 @@ Release Notes - Hive - Version 0.11.0
     * [HIVE-4326] - Clean up remaining items in hive/hcatalog/historical/trunk
 
 ** Bug
+    * [HIVE-4820] - webhcat_config.sh should set default values for HIVE_HOME and HCAT_PREFIX that work with default build tree structure
     * [HIVE-2264] - Hive server is SHUTTING DOWN when invalid queries beeing executed.
     * [HIVE-2332] - If all of the parameters of distinct functions are exists in group by columns, query fails in runtime
     * [HIVE-2689] - ObjectInspectorConverters cannot convert Void types to Array/Map/Struct types.

Modified: hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLine.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLine.properties?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLine.properties (original)
+++ hive/branches/tez/beeline/src/java/org/apache/hive/beeline/BeeLine.properties Mon Jul 29 21:08:03 2013
@@ -141,6 +141,8 @@ cmd-usage: Usage: java org.apache.hive.c
 \  -n <username>                   the username to connect as\n \
 \  -p <password>                   the password to connect as\n \
 \  -d <driver class>               the driver class to use\n \
+\  -e <query>                      query that should be executed\n \
+\  -f <file>                       script file that should be executed\n \
 \  --color=[true/false]            control whether color is used for display\n \
 \  --showHeader=[true/false]       show column names in query results\n \
 \  --headerInterval=ROWS;          the interval between which headers are displayed\n \

Modified: hive/branches/tez/build-common.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/build-common.xml?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/build-common.xml (original)
+++ hive/branches/tez/build-common.xml Mon Jul 29 21:08:03 2013
@@ -59,7 +59,7 @@
   <property name="test.output" value="true"/>
   <property name="test.junit.output.format" value="xml"/>
   <property name="test.junit.output.usefile" value="true"/>
-  <property name="minimr.query.files" value="list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,schemeAuthority.q,truncate_column_buckets.q,remote_script.q,load_hdfs_file_with_space_in_the_name.q"/>
+  <property name="minimr.query.files" value="list_bucket_dml_10.q,input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q,bucketmapjoin7.q,optrstat_groupby.q,bucket_num_reducers.q,bucket5.q,load_fs2.q,bucket_num_reducers2.q,infer_bucket_sort_merge.q,infer_bucket_sort_reducers_power_two.q,infer_bucket_sort_dyn_part.q,infer_bucket_sort_bucketed_table.q,infer_bucket_sort_map_operators.q,infer_bucket_sort_num_buckets.q,leftsemijoin_mr.q,schemeAuthority.q,truncate_column_buckets.q,remote_script.q,,load_hdfs_file_with_space_in_the_name.q,parallel_orderby.q"/>
   <property name="minimr.query.negative.files" value="cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q" />
   <property name="test.silent" value="true"/>
   <property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>

Modified: hive/branches/tez/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/build.properties?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/build.properties (original)
+++ hive/branches/tez/build.properties Mon Jul 29 21:08:03 2013
@@ -29,7 +29,7 @@ javac.args.warnings=
 
 hadoop-0.20.version=0.20.2
 hadoop-0.20S.version=1.1.2
-hadoop-0.23.version=2.0.3-alpha
+hadoop-0.23.version=2.0.5-alpha
 hadoop.version=${hadoop-0.20.version}
 hadoop.security.version=${hadoop-0.20S.version}
 # Used to determine which set of Hadoop artifacts we depend on.
@@ -73,12 +73,30 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ex
 common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
 
 # module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
-iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
-iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
-iterate.hive.thrift=ql,service,metastore,serde
-iterate.hive.protobuf=ql
-iterate.hive.cpp=odbc
+
+# full profile
+iterate.hive.full.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
+iterate.hive.full.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
+iterate.hive.full.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
+iterate.hive.full.thrift=ql,service,metastore,serde
+iterate.hive.full.protobuf=ql
+iterate.hive.full.cpp=odbc
+
+# no hcatalog profile
+iterate.hive.nohcat.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
+iterate.hive.nohcat.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils
+iterate.hive.nohcat.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service
+iterate.hive.nohcat.thrift=ql,service,metastore,serde
+iterate.hive.nohcat.protobuf=ql
+iterate.hive.nohcat.cpp=odbc
+
+# core profile
+iterate.hive.core.all=ant,shims,common,serde,metastore,ql,cli
+iterate.hive.core.modules=shims,common,serde,metastore,ql,cli
+iterate.hive.core.tests=ql
+iterate.hive.core.thrift=ql
+iterate.hive.core.protobuf=ql
+iterate.hive.core.cpp=
 
 #
 # Test Properties
@@ -88,7 +106,7 @@ iterate.hive.cpp=odbc
 # (measured in milliseconds). Ignored if fork is disabled. When running
 # multiple tests inside the same Java VM (see forkMode), timeout
 # applies to the time that all tests use together, not to an individual test.
-test.junit.timeout=43200000
+test.junit.timeout=86400000
 
 # Use this property to selectively disable tests from the command line:
 # ant test -Dtest.junit.exclude="**/TestCliDriver.class"

Modified: hive/branches/tez/build.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/build.xml?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/build.xml (original)
+++ hive/branches/tez/build.xml Mon Jul 29 21:08:03 2013
@@ -59,6 +59,51 @@
   <property name="rat.build.dir" location="${build.dir.hive}/rat"/>
   <property name="md5sum.format" value="{0}  {1}"/>
 
+  <taskdef resource="net/sf/antcontrib/antcontrib.properties">
+    <classpath>
+      <pathelement location="${hive.root}/testlibs/ant-contrib-1.0b3.jar"/>
+    </classpath>
+  </taskdef>
+
+  <if>
+    <equals arg1="${build.profile}" arg2="core"/>
+    <then>
+      <property name="iterate.hive.all" value="${iterate.hive.core.all}"/>
+      <property name="iterate.hive.modules" value="${iterate.hive.core.modules}"/>
+      <property name="iterate.hive.tests" value="${iterate.hive.core.tests}"/>
+      <property name="iterate.hive.thrift" value="${iterate.hive.core.thrift}"/>
+      <property name="iterate.hive.protobuf" value="${iterate.hive.core.protobuf}"/>
+      <property name="iterate.hive.cpp" value="${iterate.hive.core.cpp}"/>
+    </then>
+  </if>
+
+  <if>
+    <equals arg1="${build.profile}" arg2="nohcat"/>
+    <then>
+      <property name="iterate.hive.all" value="${iterate.hive.nohcat.all}"/>
+      <property name="iterate.hive.modules" value="${iterate.hive.nohcat.modules}"/>
+      <property name="iterate.hive.tests" value="${iterate.hive.nohcat.tests}"/>
+      <property name="iterate.hive.thrift" value="${iterate.hive.nohcat.thrift}"/>
+      <property name="iterate.hive.protobuf" value="${iterate.hive.nohcat.protobuf}"/>
+      <property name="iterate.hive.cpp" value="${iterate.hive.nohcat.cpp}"/>
+    </then>
+  </if>
+
+  <if>
+    <or>
+      <equals arg1="${build.profile}" arg2="full"/>
+      <not><isset property="build.profile"/></not>
+    </or>
+    <then>
+      <property name="iterate.hive.all" value="${iterate.hive.full.all}"/>
+      <property name="iterate.hive.modules" value="${iterate.hive.full.modules}"/>
+      <property name="iterate.hive.tests" value="${iterate.hive.full.tests}"/>
+      <property name="iterate.hive.thrift" value="${iterate.hive.full.thrift}"/>
+      <property name="iterate.hive.protobuf" value="${iterate.hive.full.protobuf}"/>
+      <property name="iterate.hive.cpp" value="${iterate.hive.full.cpp}"/>
+    </then>
+  </if>
+
   <!-- Check minimum ant version required -->
   <fail message="Please use ant version 1.8.0 or greater for building hive.">
     <condition>
@@ -477,18 +522,18 @@
         <exclude name="**/hive-anttasks*.jar"/>
         <exclude name="**/hive-testutils*.jar"/>
       </fileset>
-      <fileset file="${build.dir.hive}/beeline/hive-beeline-${version}.jar"/>
-      <fileset file="${build.dir.hive}/cli/hive-cli-${version}.jar"/>
-      <fileset file="${build.dir.hive}/common/hive-common-${version}.jar"/>
-      <fileset file="${build.dir.hive}/ql/hive-exec-${version}.jar"/>
-      <fileset file="${build.dir.hive}/metastore/hive-metastore-${version}.jar"/>
-      <fileset file="${build.dir.hive}/hwi/hive-hwi-${version}.war"/>
-      <fileset file="${build.dir.hive}/contrib/hive-contrib-${version}.jar"/>
+      <fileset file="${build.dir.hive}/beeline/hive-beeline-${version}.jar" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/cli/hive-cli-${version}.jar" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/common/hive-common-${version}.jar" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/ql/hive-exec-${version}.jar" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/metastore/hive-metastore-${version}.jar" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/hwi/hive-hwi-${version}.war" erroronmissingdir="false"/>
+      <fileset file="${build.dir.hive}/contrib/hive-contrib-${version}.jar" erroronmissingdir="false"/>
       <fileset dir="${build.dir.hive}/ivy/lib/default">
         <include name="*.jar"/>
         <exclude name="*.tar.gz"/>
         <exclude name="hadoop-*.jar" />
-	<exclude name="**/*high-scale-lib-*"/>
+        <exclude name="**/*high-scale-lib-*"/>
         <exclude name="**/hamcrest-core-*jar"/>
         <exclude name="**/junit*.jar"/>
         <exclude name="**/asm*.jar"/>
@@ -516,12 +561,27 @@
     </chmod>
 
     <!-- Package the hcat stuff and pull it up into Hive's build dir -->
-    <ant antfile="${hive.root}/hcatalog/build.xml" target="package"
-        inheritAll="false"/>
-    <mkdir dir="${target.dir}/hcatalog"/>
-    <copy todir="${target.dir}/hcatalog">
-        <fileset dir="${hive.root}/hcatalog/build/hcatalog-${hcatalog.version}"/>
-    </copy>
+    <if>
+      <matches string="${iterate.hive.all}" pattern="hcatalog"/>
+      <then>
+        <ant antfile="${hive.root}/hcatalog/build.xml" target="package"
+          inheritAll="false"/>
+        <mkdir dir="${target.dir}/hcatalog"/>
+        <copy todir="${target.dir}/hcatalog">
+          <fileset dir="${hive.root}/hcatalog/build/hcatalog-${hcatalog.version}"/>
+        </copy>
+      </then>
+    </if>
+    <!--fix permissions since 'copy' loses them (known ant/Java issue)-->
+    <if>
+      <matches string="${iterate.hive.all}" pattern="hcatalog"/>
+      <then>
+        <chmod perm="ugo+x" type="file">
+          <fileset dir="${target.dir}/hcatalog/bin"/>
+          <fileset dir="${target.dir}/hcatalog/sbin"/>
+        </chmod>
+      </then>
+    </if>
   </target>
 
 
@@ -752,6 +812,8 @@
         <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${bin.final.name}"
                     excludes="${vcs.excludes}">
           <include name="bin/**"/>
+          <include name="hcatalog/bin/*"/>
+          <include name="hcatalog/sbin/*"/>
         </tarfileset>
         <tarfileset dir="${build.dir.hive}/dist" mode="755" prefix="${bin.final.name}"
                     excludes="${vcs.excludes}">
@@ -763,6 +825,8 @@
           <exclude name="bin/**"/>
           <exclude name="docs/**"/>
           <exclude name="lib/py/**/*-remote"/>
+          <exclude name="hcatalog/bin/*"/>
+          <exclude name="hcatalog/sbin/*"/>
         </tarfileset>
       </param.listofitems>
     </macro_tar>

Modified: hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/branches/tez/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Mon Jul 29 21:08:03 2013
@@ -52,9 +52,9 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.Utilities.StreamPrinter;
+import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/Metrics.java Mon Jul 29 21:08:03 2013
@@ -22,6 +22,7 @@ import java.lang.management.ManagementFa
 import java.util.HashMap;
 
 import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 
 /**
@@ -39,32 +40,32 @@ import javax.management.ObjectName;
  */
 public class Metrics {
 
+  private Metrics() {
+    // block
+  }
+  
   /**
    * MetricsScope : A class that encapsulates an idea of a metered scope.
    * Instantiating a named scope and then closing it exposes two counters:
    *   (i) a "number of calls" counter ( &lt;name&gt;.n ), and
    *  (ii) a "number of msecs spent between scope open and close" counter. ( &lt;name&gt;.t)
    */
-  public class MetricsScope {
+  public static class MetricsScope {
 
-    String name = null;
-    boolean isOpen = false;
-    Long startTime = null;
-    String numCounter = null;
-    String timeCounter = null;
-    String avgTimeCounter = null;
-
-    //disable default ctor - so that it can't be created without a name
-    @SuppressWarnings("unused")
-    private MetricsScope() {
-    }
+    final String name;
+    final String numCounter;
+    final String timeCounter;
+    final String avgTimeCounter;
+    
+    private boolean isOpen = false;
+    private Long startTime = null;
 
     /**
      * Instantiates a named scope - intended to only be called by Metrics, so locally scoped.
      * @param name - name of the variable
      * @throws IOException
      */
-    MetricsScope(String name) throws IOException {
+    private MetricsScope(String name) throws IOException {
       this.name = name;
       this.numCounter = name + ".n";
       this.timeCounter = name + ".t";
@@ -128,28 +129,36 @@ public class Metrics {
 
   }
 
+  private static final MetricsMBean metrics = new MetricsMBeanImpl();
 
-  static MetricsMBean metrics = new MetricsMBeanImpl();
-
-  static ThreadLocal<HashMap<String, MetricsScope>> threadLocalScopes
+  private static final ObjectName oname;
+  static {
+    try {
+      oname = new ObjectName(
+          "org.apache.hadoop.hive.common.metrics:type=MetricsMBean");      
+    } catch (MalformedObjectNameException mone) {
+      throw new RuntimeException(mone);
+    }
+  }
+  
+  
+  private static final ThreadLocal<HashMap<String, MetricsScope>> threadLocalScopes
     = new ThreadLocal<HashMap<String,MetricsScope>>() {
     @Override
-    protected synchronized HashMap<String,MetricsScope> initialValue() {
+    protected HashMap<String,MetricsScope> initialValue() {
       return new HashMap<String,MetricsScope>();
     }
   };
 
-  static boolean initialized = false;
-
-  static Metrics m = new Metrics();
+  private static boolean initialized = false;
 
   public static void init() throws Exception {
-    if (!initialized) {
-      MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
-      ObjectName oname = new ObjectName(
-        "org.apache.hadoop.hive.common.metrics:type=MetricsMBean");
-      mbs.registerMBean(metrics, oname);
-      initialized = true;
+    synchronized (metrics) {
+      if (!initialized) {
+        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+        mbs.registerMBean(metrics, oname);
+        initialized = true;
+      }
     }
   }
 
@@ -181,9 +190,7 @@ public class Metrics {
     if (!initialized) {
       return;
     }
-    synchronized(metrics) {
-      metrics.put(name,value);
-    }
+    metrics.put(name,value);
   }
 
   public static Object get(String name) throws IOException{
@@ -200,7 +207,7 @@ public class Metrics {
     if (threadLocalScopes.get().containsKey(name)) {
       threadLocalScopes.get().get(name).open();
     } else {
-      threadLocalScopes.get().put(name, m.new MetricsScope(name));
+      threadLocalScopes.get().put(name, new MetricsScope(name));
     }
     return threadLocalScopes.get().get(name);
   }
@@ -225,4 +232,22 @@ public class Metrics {
     }
   }
 
+  /**
+   * Resets the static context state to initial.
+   * Used primarily for testing purposes.
+   * 
+   * Note that threadLocalScopes ThreadLocal is *not* cleared in this call.
+   */
+  static void uninit() throws Exception {
+    synchronized (metrics) {
+      if (initialized) {
+        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+        if (mbs.isRegistered(oname)) {
+          mbs.unregisterMBean(oname);
+        }
+        metrics.clear();
+        initialized = false;
+      }
+    }
+  }
 }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBean.java Mon Jul 29 21:08:03 2013
@@ -47,5 +47,10 @@ public interface MetricsMBean extends Dy
      * @throws Exception
      */
     public abstract Object get(String name) throws IOException;
+    
 
+    /**
+     * Removes all the keys and values from this MetricsMBean. 
+     */
+    void clear();
 }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/common/metrics/MetricsMBeanImpl.java Mon Jul 29 21:08:03 2013
@@ -36,15 +36,15 @@ import javax.management.ReflectionExcept
 
 public class MetricsMBeanImpl implements  MetricsMBean {
 
-    Map<String,Object> metricsMap = new HashMap<String,Object>();
+    private final Map<String,Object> metricsMap = new HashMap<String,Object>();
 
-    MBeanAttributeInfo[] attributeInfos;
-    boolean dirtyAttributeInfoCache = true;
+    private MBeanAttributeInfo[] attributeInfos;
+    private boolean dirtyAttributeInfoCache = true;
 
-    MBeanConstructorInfo[] ctors = null;
-    MBeanOperationInfo[] ops = {new MBeanOperationInfo("reset",
+    private static final MBeanConstructorInfo[] ctors = null;
+    private static final MBeanOperationInfo[] ops = {new MBeanOperationInfo("reset",
         "Sets the values of all Attributes to 0", null, "void", MBeanOperationInfo.ACTION)};
-    MBeanNotificationInfo[] notifs = null;
+    private static final MBeanNotificationInfo[] notifs = null;
 
     @Override
     public Object getAttribute(String arg0) throws AttributeNotFoundException,
@@ -77,7 +77,7 @@ public class MetricsMBeanImpl implements
             int i = 0;
             for (String key : metricsMap.keySet()) {
               attributeInfos[i] = new MBeanAttributeInfo(
-                  key, metricsMap.get(key).getClass().getName(), key, true, false, false);
+                  key, metricsMap.get(key).getClass().getName(), key, true, true/*writable*/, false);
               i++;
             }
             dirtyAttributeInfoCache = false;
@@ -129,12 +129,14 @@ public class MetricsMBeanImpl implements
         return attributesSet;
     }
 
+    @Override
     public boolean hasKey(String name) {
       synchronized(metricsMap) {
         return metricsMap.containsKey(name);
       }
     }
 
+    @Override
     public void put(String name, Object value) throws IOException {
       synchronized(metricsMap) {
         if (!metricsMap.containsKey(name)) {
@@ -144,6 +146,7 @@ public class MetricsMBeanImpl implements
       }
     }
 
+    @Override
     public Object get(String name) throws IOException {
         try {
           return getAttribute(name);
@@ -163,4 +166,13 @@ public class MetricsMBeanImpl implements
         }
       }
     }
+    
+    @Override
+    public void clear() {
+      synchronized(metricsMap) {
+        attributeInfos = null;
+        dirtyAttributeInfoCache = true;
+        metricsMap.clear();
+      }
+    }
 }

Modified: hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/branches/tez/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Jul 29 21:08:03 2013
@@ -178,10 +178,12 @@ public class HiveConf extends Configurat
     SCRIPTWRAPPER("hive.exec.script.wrapper", null),
     PLAN("hive.exec.plan", ""),
     SCRATCHDIR("hive.exec.scratchdir", "/tmp/hive-" + System.getProperty("user.name")),
-    LOCALSCRATCHDIR("hive.exec.local.scratchdir", "/tmp/" + System.getProperty("user.name")),
+    LOCALSCRATCHDIR("hive.exec.local.scratchdir", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
     SUBMITVIACHILD("hive.exec.submitviachild", false),
     SCRIPTERRORLIMIT("hive.exec.script.maxerrsize", 100000),
     ALLOWPARTIALCONSUMP("hive.exec.script.allow.partial.consumption", false),
+    STREAMREPORTERPERFIX("stream.stderr.reporter.prefix", "reporter:"),
+    STREAMREPORTERENABLED("stream.stderr.reporter.enabled", true),
     COMPRESSRESULT("hive.exec.compress.output", false),
     COMPRESSINTERMEDIATE("hive.exec.compress.intermediate", false),
     COMPRESSINTERMEDIATECODEC("hive.intermediate.compression.codec", ""),
@@ -202,7 +204,7 @@ public class HiveConf extends Configurat
     DYNAMICPARTITIONMAXPARTSPERNODE("hive.exec.max.dynamic.partitions.pernode", 100),
     MAXCREATEDFILES("hive.exec.max.created.files", 100000L),
     DOWNLOADED_RESOURCES_DIR("hive.downloaded.resources.dir",
-        "/tmp/${hive.session.id}_resources"),
+        System.getProperty("java.io.tmpdir") + File.separator  + "${hive.session.id}_resources"),
     DEFAULTPARTITIONNAME("hive.exec.default.partition.name", "__HIVE_DEFAULT_PARTITION__"),
     DEFAULT_ZOOKEEPER_PARTITION_NAME("hive.lockmgr.zookeeper.default.partition.name", "__HIVE_DEFAULT_ZOOKEEPER_PARTITION__"),
     // Whether to show a link to the most failed task + debugging tips
@@ -314,7 +316,7 @@ public class HiveConf extends Configurat
     METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_ZK_ACL(
         "hive.cluster.delegation.token.store.zookeeper.acl", ""),
     METASTORE_CACHE_PINOBJTYPES("hive.metastore.cache.pinobjtypes", "Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"),
-    METASTORE_CONNECTION_POOLING_TYPE("datanucleus.connectionPoolingType", "DBCP"),
+    METASTORE_CONNECTION_POOLING_TYPE("datanucleus.connectionPoolingType", "BONECP"),
     METASTORE_VALIDATE_TABLES("datanucleus.validateTables", false),
     METASTORE_VALIDATE_COLUMNS("datanucleus.validateColumns", false),
     METASTORE_VALIDATE_CONSTRAINTS("datanucleus.validateConstraints", false),
@@ -324,7 +326,8 @@ public class HiveConf extends Configurat
     METASTORE_TRANSACTION_ISOLATION("datanucleus.transactionIsolation", "read-committed"),
     METASTORE_CACHE_LEVEL2("datanucleus.cache.level2", false),
     METASTORE_CACHE_LEVEL2_TYPE("datanucleus.cache.level2.type", "none"),
-    METASTORE_IDENTIFIER_FACTORY("datanucleus.identifierFactory", "datanucleus"),
+    METASTORE_IDENTIFIER_FACTORY("datanucleus.identifierFactory", "datanucleus1"),
+    METASTORE_USE_LEGACY_VALUE_STRATEGY("datanucleus.rdbms.useLegacyNativeValueStrategy", true),
     METASTORE_PLUGIN_REGISTRY_BUNDLE_CHECK("datanucleus.plugin.pluginRegistryBundleCheck", "LOG"),
     METASTORE_BATCH_RETRIEVE_MAX("hive.metastore.batch.retrieve.max", 300),
     METASTORE_BATCH_RETRIEVE_TABLE_PARTITION_MAX(
@@ -353,7 +356,7 @@ public class HiveConf extends Configurat
     METASTORE_CONNECTION_DRIVER("javax.jdo.option.ConnectionDriverName",
         "org.apache.derby.jdbc.EmbeddedDriver"),
     METASTORE_MANAGER_FACTORY_CLASS("javax.jdo.PersistenceManagerFactoryClass",
-        "org.datanucleus.jdo.JDOPersistenceManagerFactory"),
+        "org.datanucleus.api.jdo.JDOPersistenceManagerFactory"),
     METASTORE_DETACH_ALL_ON_COMMIT("javax.jdo.option.DetachAllOnCommit", true),
     METASTORE_NON_TRANSACTIONAL_READ("javax.jdo.option.NonTransactionalRead", true),
     METASTORE_CONNECTION_USER_NAME("javax.jdo.option.ConnectionUserName", "APP"),
@@ -442,11 +445,11 @@ public class HiveConf extends Configurat
     HIVECHECKFILEFORMAT("hive.fileformat.check", true),
 
     // default serde for rcfile
-    HIVEDEFAULTRCFILESERDE("hive.default.rcfile.serde", 
+    HIVEDEFAULTRCFILESERDE("hive.default.rcfile.serde",
                            "org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe"),
 
     //Location of Hive run time structured log file
-    HIVEHISTORYFILELOC("hive.querylog.location", "/tmp/" + System.getProperty("user.name")),
+    HIVEHISTORYFILELOC("hive.querylog.location", System.getProperty("java.io.tmpdir") + File.separator + System.getProperty("user.name")),
 
     // Whether to log the plan's progress every time a job's progress is checked
     HIVE_LOG_INCREMENTAL_PLAN_PROGRESS("hive.querylog.enable.plan.progress", true),
@@ -560,10 +563,16 @@ public class HiveConf extends Configurat
     HIVEOPTSORTMERGEBUCKETMAPJOIN("hive.optimize.bucketmapjoin.sortedmerge", false), // try to use sorted merge bucket map join
     HIVEOPTREDUCEDEDUPLICATION("hive.optimize.reducededuplication", true),
     HIVEOPTREDUCEDEDUPLICATIONMINREDUCER("hive.optimize.reducededuplication.min.reducer", 4),
+
+    HIVESAMPLINGFORORDERBY("hive.optimize.sampling.orderby", false),
+    HIVESAMPLINGNUMBERFORORDERBY("hive.optimize.sampling.orderby.number", 1000),
+    HIVESAMPLINGPERCENTFORORDERBY("hive.optimize.sampling.orderby.percent", 0.1f),
+
     // whether to optimize union followed by select followed by filesink
     // It creates sub-directories in the final output, so should not be turned on in systems
     // where MAPREDUCE-1501 is not present
     HIVE_OPTIMIZE_UNION_REMOVE("hive.optimize.union.remove", false),
+    HIVEOPTCORRELATION("hive.optimize.correlation", false), // exploit intra-query correlations
 
     // whether hadoop map-reduce supports sub-directories. It was added by MAPREDUCE-1501.
     // Some optimizations can only be performed if the version of hadoop being used supports

Modified: hive/branches/tez/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/tez/conf/hive-default.xml.template?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/conf/hive-default.xml.template (original)
+++ hive/branches/tez/conf/hive-default.xml.template Mon Jul 29 21:08:03 2013
@@ -141,7 +141,7 @@
 
 <property>
   <name>javax.jdo.PersistenceManagerFactoryClass</name>
-  <value>org.datanucleus.jdo.JDOPersistenceManagerFactory</value>
+  <value>org.datanucleus.api.jdo.JDOPersistenceManagerFactory</value>
   <description>class implementing the jdo persistence</description>
 </property>
 
@@ -237,8 +237,8 @@
 
 <property>
   <name>datanucleus.identifierFactory</name>
-  <value>datanucleus</value>
-  <description>Name of the identifier factory to use when generating table/column names etc. 'datanucleus' is used for backward compatibility</description>
+  <value>datanucleus1</value>
+  <description>Name of the identifier factory to use when generating table/column names etc. 'datanucleus1' is used for backward compatibility with DataNucleus v1</description>
 </property>
 
 <property>
@@ -293,7 +293,7 @@
   <name>hive.metastore.disallow.incompatible.col.type.change</name>
   <value></value>
   <description>If true (default is false), ALTER TABLE operations which change the type of 
-    a column (say STRING) to an incompatible type (say MAP<STRING, STRING>) are disallowed.  
+    a column (say STRING) to an incompatible type (say MAP&lt;STRING, STRING&gt;) are disallowed.  
     RCFile default serde (ColumnarSerde) serializes the values in such a way that the
     datatypes can be converted from string to any type. The map is also serialized as
     a string, which can be read as a string as well. However, with any binary 
@@ -896,6 +896,18 @@
 </property>
 
 <property>
+  <name>stream.stderr.reporter.prefix</name>
+  <value>reporter:</value>
+  <description>Streaming jobs that log to stardard error with this prefix can log counter or status information.</description>
+</property>
+
+<property>
+  <name>stream.stderr.reporter.enabled</name>
+  <value>true</value>
+  <description>Enable consumption of status and counter messages for streaming jobs.</description>
+</property>
+
+<property>
   <name>hive.script.recordwriter</name>
   <value>org.apache.hadoop.hive.ql.exec.TextRecordWriter</value>
   <description>The default record writer for writing data to the user scripts. </description>
@@ -1121,6 +1133,12 @@
 </property>
 
 <property>
+  <name>hive.optimize.correlation</name>
+  <value>false</value>
+  <description>exploit intra-query correlations.</description>
+</property>
+
+<property>
   <name>hive.optimize.reducededuplication.min.reducer</name>
   <value>4</value>
   <description>Reduce deduplication merges two RSs by moving key/parts/reducer-num of the child RS to parent RS.

Modified: hive/branches/tez/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out Mon Jul 29 21:08:03 2013
@@ -25,4 +25,4 @@ Task ID:
 Logs:
 
 #### A masked pattern was here ####
-FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.MapRedTask
+FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

Modified: hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientnegative/serde_regex.q.out Mon Jul 29 21:08:03 2013
@@ -80,5 +80,4 @@ WITH SERDEPROPERTIES (
 )
 STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. java.lang.RuntimeException: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException org.apache.hadoop.hive.contrib.serde2.RegexSerDe only accepts string columns, but column[5] named status has type int)

Modified: hive/branches/tez/contrib/src/test/results/clientnegative/url_hook.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientnegative/url_hook.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientnegative/url_hook.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientnegative/url_hook.q.out Mon Jul 29 21:08:03 2013
@@ -5,5 +5,4 @@ POSTHOOK: type: SHOWTABLES
 src
 PREHOOK: query: SHOW TABLES 'src'
 PREHOOK: type: SHOWTABLES
-FAILED: Error in metadata: ERROR: The database default does not exist.
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Database does not exist: default

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/dboutput.q.out Mon Jul 29 21:08:03 2013
@@ -141,7 +141,7 @@ STAGE PLANS:
             alias: src
             Filter Operator
               predicate:
-                  expr: (key < 10.0)
+                  expr: (key < 10)
                   type: boolean
               Select Operator
                 expressions:

Modified: hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (original)
+++ hive/branches/tez/contrib/src/test/results/clientpositive/serde_typedbytes4.q.out Mon Jul 29 21:08:03 2013
@@ -48,7 +48,7 @@ STAGE PLANS:
             alias: src
             Filter Operator
               predicate:
-                  expr: (key < 100.0)
+                  expr: (key < 100)
                   type: boolean
               Select Operator
                 expressions:

Modified: hive/branches/tez/data/files/datatypes.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/data/files/datatypes.txt?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/data/files/datatypes.txt (original)
+++ hive/branches/tez/data/files/datatypes.txt Mon Jul 29 21:08:03 2013
@@ -1,3 +1,3 @@
-\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
--1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N\N
-1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.0123456YWJjZA==
+\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
+-1false-1.1\N\N\N-1-1-1.0-1\N\N\N\N\N\N
+1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22 09:00:00.123456789123456789.0123456YWJjZA==2013-01-01

Modified: hive/branches/tez/data/files/kv1kv2.cogroup.txt
URL: http://svn.apache.org/viewvc/hive/branches/tez/data/files/kv1kv2.cogroup.txt?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/data/files/kv1kv2.cogroup.txt (original)
+++ hive/branches/tez/data/files/kv1kv2.cogroup.txt Mon Jul 29 21:08:03 2013
@@ -1,1000 +1,1000 @@
-0val_0
-0val_0
-0val_0
-10
-10
-11
-0val_10
-110
-0val_100
-0val_100
-1100
-1100
-1101
-1102
-0val_103
-0val_103
-0val_104
-0val_104
-1104
-1104
-1104
-0val_105
-1105
-1105
-1106
-0val_11
-111
-111
-111
-1110
-0val_111
-0val_113
-0val_113
-0val_114
-1114
-1114
-1114
-0val_116
-1116
-1117
-1117
-0val_118
-0val_118
-1118
-1118
-1118
-0val_119
-0val_119
-0val_119
-1119
-1119
-1119
-0val_12
-0val_12
-112
-0val_120
-0val_120
-1120
-1121
-1121
-1122
-1122
-1122
-1123
-1123
-0val_125
-0val_125
-1125
-0val_126
-1126
-1126
-0val_128
-0val_128
-0val_128
-1128
-1128
-0val_129
-0val_129
-1129
-1129
-0val_131
-1132
-1132
-0val_133
-1133
-0val_134
-0val_134
-1134
-1135
-1135
-1135
-0val_136
-1136
-0val_137
-0val_137
-1137
-0val_138
-0val_138
-0val_138
-0val_138
-1138
-1138
-1140
-0val_143
-1143
-1144
-0val_145
-0val_146
-0val_146
-1147
-1147
-0val_149
-0val_149
-1149
-0val_15
-0val_15
-115
-115
-0val_150
-1151
-1151
-0val_152
-0val_152
-1152
-1152
-1152
-0val_153
-1153
-1153
-0val_155
-0val_156
-1156
-1156
-0val_157
-1157
-1157
-0val_158
-116
-116
-0val_160
-1160
-1161
-1161
-1161
-1161
-0val_162
-1162
-0val_163
-0val_164
-0val_164
-1164
-1164
-0val_165
-0val_165
-1165
-0val_166
-0val_167
-0val_167
-0val_167
-1167
-0val_168
-1168
-0val_169
-0val_169
-0val_169
-0val_169
-0val_17
-0val_170
-1170
-0val_172
-0val_172
-1172
-0val_174
-0val_174
-1174
-1174
-0val_175
-0val_175
-1175
-1175
-0val_176
-0val_176
-0val_177
-1177
-1177
-0val_178
-1178
-1178
-0val_179
-0val_179
-1179
-0val_18
-0val_18
-0val_180
-0val_181
-1182
-0val_183
-1183
-1184
-1185
-0val_186
-0val_187
-0val_187
-0val_187
-0val_189
-1189
-0val_19
-119
-0val_190
-0val_191
-0val_191
-1191
-0val_192
-1192
-0val_193
-0val_193
-0val_193
-0val_194
-0val_195
-0val_195
-0val_196
-1196
-1196
-1196
-0val_197
-0val_197
-1197
-0val_199
-0val_199
-0val_199
-1199
-0val_2
-12
-0val_20
-120
-120
-0val_200
-0val_200
-0val_201
-0val_202
-0val_203
-0val_203
-1204
-0val_205
-0val_205
-1205
-1206
-1206
-1206
-0val_207
-0val_207
-0val_208
-0val_208
-0val_208
-0val_209
-0val_209
-1209
-1209
-121
-121
-121
-121
-1212
-0val_213
-0val_213
-1213
-0val_214
-1215
-0val_216
-0val_216
-1216
-0val_217
-0val_217
-1217
-1217
-0val_218
-0val_219
-0val_219
-122
-0val_221
-0val_221
-0val_222
-1222
-0val_223
-0val_223
-0val_224
-0val_224
-1224
-0val_226
-1226
-1226
-1226
-1226
-1227
-0val_228
-1228
-0val_229
-0val_229
-123
-0val_230
-0val_230
-0val_230
-0val_230
-0val_230
-1231
-0val_233
-0val_233
-0val_235
-1235
-0val_237
-0val_237
-0val_238
-0val_238
-1238
-0val_239
-0val_239
-1239
-1239
-0val_24
-0val_24
-1240
-0val_241
-1241
-1241
-1241
-1241
-0val_242
-0val_242
-1242
-1243
-1243
-0val_244
-1244
-1244
-1244
-1245
-1245
-1246
-1246
-0val_247
-0val_248
-1248
-0val_249
-1249
-1249
-0val_252
-1252
-1254
-0val_255
-0val_255
-0val_256
-0val_256
-1256
-0val_257
-1257
-1257
-0val_258
-1258
-1259
-1259
-0val_26
-0val_26
-0val_260
-1260
-1260
-1261
-0val_262
-1262
-1262
-0val_263
-1264
-1264
-0val_265
-0val_265
-1265
-0val_266
-1267
-1268
-0val_27
-1271
-0val_272
-0val_272
-1272
-0val_273
-0val_273
-0val_273
-1273
-0val_274
-1274
-0val_275
-1275
-1275
-1276
-0val_277
-0val_277
-0val_277
-0val_277
-1277
-1277
-0val_278
-0val_278
-1278
-0val_28
-0val_280
-0val_280
-0val_281
-0val_281
-1281
-1281
-1281
-0val_282
-0val_282
-0val_283
-0val_284
-1284
-1284
-0val_285
-1285
-0val_286
-1286
-0val_287
-1287
-1287
-0val_288
-0val_288
-0val_289
-1289
-129
-129
-0val_291
-1291
-1291
-0val_292
-1292
-1292
-1293
-1293
-1295
-1295
-0val_296
-1296
-0val_298
-0val_298
-0val_298
-13
-0val_30
-130
-1300
-1300
-0val_302
-1302
-1303
-1303
-1304
-0val_305
-1305
-0val_306
-1306
-0val_307
-0val_307
-0val_308
-1308
-1308
-0val_309
-0val_309
-1309
-131
-0val_310
-1310
-1310
-1310
-0val_311
-0val_311
-0val_311
-1313
-1314
-0val_315
-0val_316
-0val_316
-0val_316
-0val_317
-0val_317
-1317
-0val_318
-0val_318
-0val_318
-1318
-132
-0val_321
-0val_321
-0val_322
-0val_322
-1322
-0val_323
-1323
-1324
-0val_325
-0val_325
-1326
-0val_327
-0val_327
-0val_327
-1328
-1328
-0val_33
-133
-1330
-0val_331
-0val_331
-1331
-1331
-0val_332
-0val_333
-0val_333
-1333
-1334
-0val_335
-1335
-1335
-0val_336
-1336
-1337
-0val_338
-1338
-0val_339
-0val_34
-1340
-0val_341
-1341
-1341
-1341
-0val_342
-0val_342
-1342
-1343
-0val_344
-0val_344
-1344
-0val_345
-1347
-1347
-0val_348
-0val_348
-0val_348
-0val_348
-0val_348
-1348
-1349
-1349
-1349
-1349
-0val_35
-0val_35
-0val_35
-135
-135
-135
-0val_351
-1351
-1351
-1352
-1352
-0val_353
-0val_353
-1353
-1355
-1355
-0val_356
-1356
-1356
-1358
-0val_360
-1360
-0val_362
-1363
-1363
-1363
-0val_364
-1364
-0val_365
-0val_366
-0val_367
-0val_367
-1367
-1367
-0val_368
-0val_369
-0val_369
-0val_369
-1369
-0val_37
-0val_37
-1371
-1371
-1371
-1371
-0val_373
-1373
-0val_374
-1374
-0val_375
-1375
-1375
-1375
-1375
-1375
-1376
-0val_377
-0val_378
-1378
-0val_379
-1379
-1381
-0val_382
-0val_382
-1382
-1382
-0val_384
-0val_384
-0val_384
-1384
-1384
-1384
-1385
-1385
-0val_386
-1386
-1386
-1388
-0val_389
-1389
-1389
-1390
-1390
-1390
-1391
-1391
-0val_392
-1392
-1392
-0val_393
-1393
-1393
-0val_394
-0val_395
-0val_395
-1395
-1395
-0val_396
-0val_396
-0val_396
-0val_397
-0val_397
-1398
-0val_399
-0val_399
-1399
-1399
-0val_4
-14
-140
-140
-0val_400
-0val_401
-0val_401
-0val_401
-0val_401
-0val_401
-1401
-0val_402
-1402
-1402
-1402
-0val_403
-0val_403
-0val_403
-0val_404
-0val_404
-1404
-1404
-1404
-1405
-0val_406
-0val_406
-0val_406
-0val_406
-1406
-0val_407
-1407
-1407
-1407
-1408
-1408
-0val_409
-0val_409
-0val_409
-1409
-1409
-0val_41
-1410
-0val_411
-1411
-1412
-1412
-0val_413
-0val_413
-1413
-0val_414
-0val_414
-1414
-1415
-1416
-0val_417
-0val_417
-0val_417
-0val_418
-0val_419
-0val_42
-0val_42
-142
-142
-142
-0val_421
-1421
-1421
-1423
-0val_424
-0val_424
-1424
-1425
-1426
-0val_427
-1427
-1427
-1428
-0val_429
-0val_429
-1429
-1429
-0val_43
-0val_430
-0val_430
-0val_430
-1430
-0val_431
-0val_431
-0val_431
-1431
-0val_432
-1432
-0val_435
-1435
-0val_436
-1436
-0val_437
-1437
-0val_438
-0val_438
-0val_438
-1438
-1438
-0val_439
-0val_439
-1439
-1439
-0val_44
-1440
-1440
-1441
-1442
-0val_443
-1443
-1443
-1443
-0val_444
-0val_446
-1446
-1446
-1447
-0val_448
-1448
-0val_449
-1450
-1450
-1451
-0val_452
-0val_453
-1453
-0val_454
-0val_454
-0val_454
-1454
-1454
-0val_455
-1455
-1455
-0val_457
-1457
-1457
-0val_458
-0val_458
-0val_459
-0val_459
-1459
-146
-0val_460
-1461
-0val_462
-0val_462
-1462
-0val_463
-0val_463
-1463
-0val_466
-0val_466
-0val_466
-0val_467
-1467
-0val_468
-0val_468
-0val_468
-0val_468
-1468
-1468
-1468
-0val_469
-0val_469
-0val_469
-0val_469
-0val_469
-1469
-0val_47
-147
-0val_470
-1470
-0val_472
-1473
-1474
-1474
-0val_475
-1475
-1476
-1476
-0val_477
-1477
-0val_478
-0val_478
-1478
-1478
-0val_479
-148
-148
-0val_480
-0val_480
-0val_480
-1480
-1480
-0val_481
-1481
-0val_482
-1482
-0val_483
-0val_484
-1484
-0val_485
-1485
-1485
-1486
-0val_487
-1487
-1488
-0val_489
-0val_489
-0val_489
-0val_489
-1489
-149
-149
-0val_490
-1490
-0val_491
-1491
-1491
-0val_492
-0val_492
-1492
-1492
-0val_493
-0val_494
-1494
-1494
-0val_495
-1495
-0val_496
-1496
-0val_497
-1497
-1497
-0val_498
-0val_498
-0val_498
-0val_5
-0val_5
-0val_5
-15
-150
-0val_51
-0val_51
-151
-152
-152
-152
-152
-0val_53
-153
-0val_54
-156
-0val_57
-0val_58
-0val_58
-158
-158
-159
-16
-16
-160
-161
-162
-162
-163
-0val_64
-0val_65
-165
-165
-0val_66
-0val_67
-0val_67
-168
-0val_69
-169
-0val_70
-0val_70
-0val_70
-170
-171
-0val_72
-0val_72
-0val_74
-175
-0val_76
-0val_76
-176
-176
-176
-0val_77
-177
-177
-0val_78
-178
-0val_8
-18
-0val_80
-180
-0val_82
-182
-182
-0val_83
-0val_83
-0val_84
-0val_84
-0val_85
-185
-0val_86
-186
-0val_87
-187
-187
-189
-189
-189
-0val_9
-0val_90
-0val_90
-0val_90
-191
-0val_92
-193
-193
-193
-194
-0val_95
-0val_95
-0val_96
-0val_97
-0val_97
-197
-197
-0val_98
-0val_98
-199
+val_0
+val_0
+val_0
+0
+0
+1
+val_10
+10
+val_100
+val_100
+100
+100
+101
+102
+val_103
+val_103
+val_104
+val_104
+104
+104
+104
+val_105
+105
+105
+106
+val_11
+11
+11
+11
+110
+val_111
+val_113
+val_113
+val_114
+114
+114
+114
+val_116
+116
+117
+117
+val_118
+val_118
+118
+118
+118
+val_119
+val_119
+val_119
+119
+119
+119
+val_12
+val_12
+12
+val_120
+val_120
+120
+121
+121
+122
+122
+122
+123
+123
+val_125
+val_125
+125
+val_126
+126
+126
+val_128
+val_128
+val_128
+128
+128
+val_129
+val_129
+129
+129
+val_131
+132
+132
+val_133
+133
+val_134
+val_134
+134
+135
+135
+135
+val_136
+136
+val_137
+val_137
+137
+val_138
+val_138
+val_138
+val_138
+138
+138
+140
+val_143
+143
+144
+val_145
+val_146
+val_146
+147
+147
+val_149
+val_149
+149
+val_15
+val_15
+15
+15
+val_150
+151
+151
+val_152
+val_152
+152
+152
+152
+val_153
+153
+153
+val_155
+val_156
+156
+156
+val_157
+157
+157
+val_158
+16
+16
+val_160
+160
+161
+161
+161
+161
+val_162
+162
+val_163
+val_164
+val_164
+164
+164
+val_165
+val_165
+165
+val_166
+val_167
+val_167
+val_167
+167
+val_168
+168
+val_169
+val_169
+val_169
+val_169
+val_17
+val_170
+170
+val_172
+val_172
+172
+val_174
+val_174
+174
+174
+val_175
+val_175
+175
+175
+val_176
+val_176
+val_177
+177
+177
+val_178
+178
+178
+val_179
+val_179
+179
+val_18
+val_18
+val_180
+val_181
+182
+val_183
+183
+184
+185
+val_186
+val_187
+val_187
+val_187
+val_189
+189
+val_19
+19
+val_190
+val_191
+val_191
+191
+val_192
+192
+val_193
+val_193
+val_193
+val_194
+val_195
+val_195
+val_196
+196
+196
+196
+val_197
+val_197
+197
+val_199
+val_199
+val_199
+199
+val_2
+2
+val_20
+20
+20
+val_200
+val_200
+val_201
+val_202
+val_203
+val_203
+204
+val_205
+val_205
+205
+206
+206
+206
+val_207
+val_207
+val_208
+val_208
+val_208
+val_209
+val_209
+209
+209
+21
+21
+21
+21
+212
+val_213
+val_213
+213
+val_214
+215
+val_216
+val_216
+216
+val_217
+val_217
+217
+217
+val_218
+val_219
+val_219
+22
+val_221
+val_221
+val_222
+222
+val_223
+val_223
+val_224
+val_224
+224
+val_226
+226
+226
+226
+226
+227
+val_228
+228
+val_229
+val_229
+23
+val_230
+val_230
+val_230
+val_230
+val_230
+231
+val_233
+val_233
+val_235
+235
+val_237
+val_237
+val_238
+val_238
+238
+val_239
+val_239
+239
+239
+val_24
+val_24
+240
+val_241
+241
+241
+241
+241
+val_242
+val_242
+242
+243
+243
+val_244
+244
+244
+244
+245
+245
+246
+246
+val_247
+val_248
+248
+val_249
+249
+249
+val_252
+252
+254
+val_255
+val_255
+val_256
+val_256
+256
+val_257
+257
+257
+val_258
+258
+259
+259
+val_26
+val_26
+val_260
+260
+260
+261
+val_262
+262
+262
+val_263
+264
+264
+val_265
+val_265
+265
+val_266
+267
+268
+val_27
+271
+val_272
+val_272
+272
+val_273
+val_273
+val_273
+273
+val_274
+274
+val_275
+275
+275
+276
+val_277
+val_277
+val_277
+val_277
+277
+277
+val_278
+val_278
+278
+val_28
+val_280
+val_280
+val_281
+val_281
+281
+281
+281
+val_282
+val_282
+val_283
+val_284
+284
+284
+val_285
+285
+val_286
+286
+val_287
+287
+287
+val_288
+val_288
+val_289
+289
+29
+29
+val_291
+291
+291
+val_292
+292
+292
+293
+293
+295
+295
+val_296
+296
+val_298
+val_298
+val_298
+3
+val_30
+30
+300
+300
+val_302
+302
+303
+303
+304
+val_305
+305
+val_306
+306
+val_307
+val_307
+val_308
+308
+308
+val_309
+val_309
+309
+31
+val_310
+310
+310
+310
+val_311
+val_311
+val_311
+313
+314
+val_315
+val_316
+val_316
+val_316
+val_317
+val_317
+317
+val_318
+val_318
+val_318
+318
+32
+val_321
+val_321
+val_322
+val_322
+322
+val_323
+323
+324
+val_325
+val_325
+326
+val_327
+val_327
+val_327
+328
+328
+val_33
+33
+330
+val_331
+val_331
+331
+331
+val_332
+val_333
+val_333
+333
+334
+val_335
+335
+335
+val_336
+336
+337
+val_338
+338
+val_339
+val_34
+340
+val_341
+341
+341
+341
+val_342
+val_342
+342
+343
+val_344
+val_344
+344
+val_345
+347
+347
+val_348
+val_348
+val_348
+val_348
+val_348
+348
+349
+349
+349
+349
+val_35
+val_35
+val_35
+35
+35
+35
+val_351
+351
+351
+352
+352
+val_353
+val_353
+353
+355
+355
+val_356
+356
+356
+358
+val_360
+360
+val_362
+363
+363
+363
+val_364
+364
+val_365
+val_366
+val_367
+val_367
+367
+367
+val_368
+val_369
+val_369
+val_369
+369
+val_37
+val_37
+371
+371
+371
+371
+val_373
+373
+val_374
+374
+val_375
+375
+375
+375
+375
+375
+376
+val_377
+val_378
+378
+val_379
+379
+381
+val_382
+val_382
+382
+382
+val_384
+val_384
+val_384
+384
+384
+384
+385
+385
+val_386
+386
+386
+388
+val_389
+389
+389
+390
+390
+390
+391
+391
+val_392
+392
+392
+val_393
+393
+393
+val_394
+val_395
+val_395
+395
+395
+val_396
+val_396
+val_396
+val_397
+val_397
+398
+val_399
+val_399
+399
+399
+val_4
+4
+40
+40
+val_400
+val_401
+val_401
+val_401
+val_401
+val_401
+401
+val_402
+402
+402
+402
+val_403
+val_403
+val_403
+val_404
+val_404
+404
+404
+404
+405
+val_406
+val_406
+val_406
+val_406
+406
+val_407
+407
+407
+407
+408
+408
+val_409
+val_409
+val_409
+409
+409
+val_41
+410
+val_411
+411
+412
+412
+val_413
+val_413
+413
+val_414
+val_414
+414
+415
+416
+val_417
+val_417
+val_417
+val_418
+val_419
+val_42
+val_42
+42
+42
+42
+val_421
+421
+421
+423
+val_424
+val_424
+424
+425
+426
+val_427
+427
+427
+428
+val_429
+val_429
+429
+429
+val_43
+val_430
+val_430
+val_430
+430
+val_431
+val_431
+val_431
+431
+val_432
+432
+val_435
+435
+val_436
+436
+val_437
+437
+val_438
+val_438
+val_438
+438
+438
+val_439
+val_439
+439
+439
+val_44
+440
+440
+441
+442
+val_443
+443
+443
+443
+val_444
+val_446
+446
+446
+447
+val_448
+448
+val_449
+450
+450
+451
+val_452
+val_453
+453
+val_454
+val_454
+val_454
+454
+454
+val_455
+455
+455
+val_457
+457
+457
+val_458
+val_458
+val_459
+val_459
+459
+46
+val_460
+461
+val_462
+val_462
+462
+val_463
+val_463
+463
+val_466
+val_466
+val_466
+val_467
+467
+val_468
+val_468
+val_468
+val_468
+468
+468
+468
+val_469
+val_469
+val_469
+val_469
+val_469
+469
+val_47
+47
+val_470
+470
+val_472
+473
+474
+474
+val_475
+475
+476
+476
+val_477
+477
+val_478
+val_478
+478
+478
+val_479
+48
+48
+val_480
+val_480
+val_480
+480
+480
+val_481
+481
+val_482
+482
+val_483
+val_484
+484
+val_485
+485
+485
+486
+val_487
+487
+488
+val_489
+val_489
+val_489
+val_489
+489
+49
+49
+val_490
+490
+val_491
+491
+491
+val_492
+val_492
+492
+492
+val_493
+val_494
+494
+494
+val_495
+495
+val_496
+496
+val_497
+497
+497
+val_498
+val_498
+val_498
+val_5
+val_5
+val_5
+5
+50
+val_51
+val_51
+51
+52
+52
+52
+52
+val_53
+53
+val_54
+56
+val_57
+val_58
+val_58
+58
+58
+59
+6
+6
+60
+61
+62
+62
+63
+val_64
+val_65
+65
+65
+val_66
+val_67
+val_67
+68
+val_69
+69
+val_70
+val_70
+val_70
+70
+71
+val_72
+val_72
+val_74
+75
+val_76
+val_76
+76
+76
+76
+val_77
+77
+77
+val_78
+78
+val_8
+8
+val_80
+80
+val_82
+82
+82
+val_83
+val_83
+val_84
+val_84
+val_85
+85
+val_86
+86
+val_87
+87
+87
+89
+89
+89
+val_9
+val_90
+val_90
+val_90
+91
+val_92
+93
+93
+93
+94
+val_95
+val_95
+val_96
+val_97
+val_97
+97
+97
+val_98
+val_98
+99

Modified: hive/branches/tez/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/branches/tez/eclipse-templates/.classpath?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/eclipse-templates/.classpath (original)
+++ hive/branches/tez/eclipse-templates/.classpath Mon Jul 29 21:08:03 2013
@@ -67,14 +67,13 @@
   <classpathentry kind="lib" path="build/ivy/lib/default/antlr-@antlr.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/antlr-runtime-@antlr-runtime.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/junit-@junit.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/jdo2-api-@jdo-api.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/jdo-api-@jdo-api.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-api-jdo-@datanucleus-api-jdo.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-core-@datanucleus-core.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-enhancer-@datanucleus-enhancer.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-rdbms-@datanucleus-rdbms.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-cli-@commons-cli.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-collections-@commons-collections.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/commons-dbcp-@commons-dbcp.version@.jar"/>
-  <classpathentry kind="lib" path="build/ivy/lib/default/datanucleus-connectionpool-@datanucleus-connectionpool.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/bonecp-@BoneCP.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/commons-pool-@commons-pool.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/slf4j-api-@slf4j-api.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/slf4j-log4j12-@slf4j-log4j12.version@.jar"/>

Modified: hive/branches/tez/eclipse-templates/.classpath._hbase
URL: http://svn.apache.org/viewvc/hive/branches/tez/eclipse-templates/.classpath._hbase?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/eclipse-templates/.classpath._hbase (original)
+++ hive/branches/tez/eclipse-templates/.classpath._hbase Mon Jul 29 21:08:03 2013
@@ -32,9 +32,9 @@
 	<classpathentry exported="true" kind="lib" path="lib/commons-logging-@commons-logging.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/commons-logging-api-@commons-logging-api.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/derby.jar"/>
-	<classpathentry exported="true" kind="lib" path="lib/jdo2-api-@jdo2-api.version@.jar"/>
+	<classpathentry exported="true" kind="lib" path="lib/jdo-api-@jdo-api.version@.jar"/>
+	<classpathentry exported="true" kind="lib" path="lib/datanucleus-api-jdo-@datanucleus-api-jdo.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/datanucleus-core-@datanucleus-core.version@.jar"/>
-	<classpathentry exported="true" kind="lib" path="lib/datanucleus-enhancer-@datanucleus-enhancer.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/datanucleus-rdbms-@datanucleus-rdbms.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/thrift-fb303-@thrift-fb303.version@.jar"/>
 	<classpathentry exported="true" kind="lib" path="lib/thrift-@thrift.version@.jar"/>

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Mon Jul 29 21:08:03 2013
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.util.Byte
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
@@ -66,10 +65,19 @@ public class HBaseSerDe extends Abstract
   public static final String HBASE_TABLE_DEFAULT_STORAGE_TYPE = "hbase.table.default.storage.type";
   public static final String HBASE_KEY_COL = ":key";
   public static final String HBASE_PUT_TIMESTAMP = "hbase.put.timestamp";
+  public static final String HBASE_SCAN_CACHE = "hbase.scan.cache";
+  public static final String HBASE_SCAN_CACHEBLOCKS = "hbase.scan.cacheblock";
+  public static final String HBASE_SCAN_BATCH = "hbase.scan.batch";
+  
+  /** Determines whether a regex matching should be done on the columns or not. Defaults to true. 
+   *  <strong>WARNING: Note that currently this only supports the suffix wildcard .*</strong> **/
+  public static final String HBASE_COLUMNS_REGEX_MATCHING = "hbase.columns.mapping.regex.matching";
+
   public static final Log LOG = LogFactory.getLog(HBaseSerDe.class);
 
   private ObjectInspector cachedObjectInspector;
   private String hbaseColumnsMapping;
+  private boolean doColumnRegexMatching;
   private List<ColumnMapping> columnsMapping;
   private SerDeParameters serdeParams;
   private boolean useJSONSerialize;
@@ -144,6 +152,21 @@ public class HBaseSerDe extends Abstract
    */
   public static List<ColumnMapping> parseColumnsMapping(String columnsMappingSpec)
       throws SerDeException {
+    return parseColumnsMapping(columnsMappingSpec, true);
+  }
+
+  /**
+   * Parses the HBase columns mapping specifier to identify the column families, qualifiers
+   * and also caches the byte arrays corresponding to them. One of the Hive table
+   * columns maps to the HBase row key, by default the first column.
+   *
+   * @param columnsMappingSpec string hbase.columns.mapping specified when creating table
+   * @param doColumnRegexMatching whether to do a regex matching on the columns or not
+   * @return List<ColumnMapping> which contains the column mapping information by position
+   * @throws SerDeException
+   */
+  public static List<ColumnMapping> parseColumnsMapping(String columnsMappingSpec, boolean doColumnRegexMatching)
+      throws SerDeException {
 
     if (columnsMappingSpec == null) {
       throw new SerDeException("Error: hbase.columns.mapping missing for this HBase table.");
@@ -189,8 +212,21 @@ public class HBaseSerDe extends Abstract
         columnMapping.hbaseRowKey = false;
 
         if (parts.length == 2) {
-          columnMapping.qualifierName = parts[1];
-          columnMapping.qualifierNameBytes = Bytes.toBytes(parts[1]);
+
+          if (doColumnRegexMatching && parts[1].endsWith(".*")) {
+            // we have a prefix with a wildcard
+            columnMapping.qualifierPrefix = parts[1].substring(0, parts[1].length() - 2);
+            columnMapping.qualifierPrefixBytes = Bytes.toBytes(columnMapping.qualifierPrefix);
+            // we weren't provided any actual qualifier name. Set these to
+            // null.
+            columnMapping.qualifierName = null;
+            columnMapping.qualifierNameBytes = null;
+          } else {
+            // set the regular provided qualifier names
+            columnMapping.qualifierName = parts[1];
+            columnMapping.qualifierNameBytes = Bytes.toBytes(parts[1]);
+            ;
+          }
         } else {
           columnMapping.qualifierName = null;
           columnMapping.qualifierNameBytes = null;
@@ -409,6 +445,8 @@ public class HBaseSerDe extends Abstract
     List<Boolean> binaryStorage;
     boolean hbaseRowKey;
     String mappingSpec;
+    String qualifierPrefix;
+    byte[] qualifierPrefixBytes;
   }
 
   private void initHBaseSerDeParameters(
@@ -420,8 +458,10 @@ public class HBaseSerDe extends Abstract
     String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     putTimestamp = Long.valueOf(tbl.getProperty(HBaseSerDe.HBASE_PUT_TIMESTAMP,"-1"));
 
+    doColumnRegexMatching = Boolean.valueOf(tbl.getProperty(HBASE_COLUMNS_REGEX_MATCHING, "true"));
+
     // Parse and initialize the HBase columns mapping
-    columnsMapping = parseColumnsMapping(hbaseColumnsMapping);
+    columnsMapping = parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
 
     // Build the type property string if not supplied
     if (columnTypeProperty == null) {
@@ -794,6 +834,7 @@ public class HBaseSerDe extends Abstract
     return columnsMapping.get(colPos).binaryStorage;
   }
 
+  @Override
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Mon Jul 29 21:08:03 2013
@@ -39,9 +39,9 @@ import org.apache.hadoop.hbase.util.Byte
 import org.apache.hadoop.hive.hbase.HBaseSerDe.ColumnMapping;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
-import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
 import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
@@ -279,8 +279,22 @@ public class HBaseStorageHandler extends
     jobProperties.put(
       HBaseSerDe.HBASE_COLUMNS_MAPPING,
       tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING));
+    jobProperties.put(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING,
+        tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, "true"));
     jobProperties.put(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,
       tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,"string"));
+    String scanCache = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_CACHE);
+    if (scanCache != null) {
+      jobProperties.put(HBaseSerDe.HBASE_SCAN_CACHE, scanCache);
+    }
+    String scanCacheBlocks = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS);
+    if (scanCacheBlocks != null) {
+      jobProperties.put(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS, scanCacheBlocks);
+    }
+    String scanBatch = tableProperties.getProperty(HBaseSerDe.HBASE_SCAN_BATCH);
+    if (scanBatch != null) {
+      jobProperties.put(HBaseSerDe.HBASE_SCAN_BATCH, scanBatch);
+    }
 
     String tableName =
       tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_NAME);
@@ -299,7 +313,7 @@ public class HBaseStorageHandler extends
     try {
       TableMapReduceUtil.addDependencyJars(jobConf);
       org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
-          HBaseStorageHandler.class);
+          HBaseStorageHandler.class, org.apache.hadoop.hbase.HBaseConfiguration.class);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Mon Jul 29 21:08:03 2013
@@ -91,11 +91,12 @@ public class HiveHBaseTableInputFormat e
     String hbaseTableName = jobConf.get(HBaseSerDe.HBASE_TABLE_NAME);
     setHTable(new HTable(HBaseConfiguration.create(jobConf), Bytes.toBytes(hbaseTableName)));
     String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
+    boolean doColumnRegexMatching = jobConf.getBoolean(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, true);
     List<Integer> readColIDs = ColumnProjectionUtils.getReadColumnIDs(jobConf);
     List<ColumnMapping> columnsMapping = null;
 
     try {
-      columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
+      columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
     } catch (SerDeException e) {
       throw new IOException(e);
     }
@@ -149,6 +150,19 @@ public class HiveHBaseTableInputFormat e
       }
     }
 
+    String scanCache = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHE);
+    if (scanCache != null) {
+      scan.setCaching(Integer.valueOf(scanCache));
+    }
+    String scanCacheBlocks = jobConf.get(HBaseSerDe.HBASE_SCAN_CACHEBLOCKS);
+    if (scanCacheBlocks != null) {
+      scan.setCacheBlocks(Boolean.valueOf(scanCacheBlocks));
+    }
+    String scanBatch = jobConf.get(HBaseSerDe.HBASE_SCAN_BATCH);
+    if (scanBatch != null) {
+      scan.setBatch(Integer.valueOf(scanBatch));
+    }
+
     // If Hive's optimizer gave us a filter to process, convert it to the
     // HBase scan form now.
     int iKey = -1;
@@ -421,6 +435,7 @@ public class HiveHBaseTableInputFormat e
     String hbaseTableName = jobConf.get(HBaseSerDe.HBASE_TABLE_NAME);
     setHTable(new HTable(HBaseConfiguration.create(jobConf), Bytes.toBytes(hbaseTableName)));
     String hbaseColumnsMapping = jobConf.get(HBaseSerDe.HBASE_COLUMNS_MAPPING);
+    boolean doColumnRegexMatching = jobConf.getBoolean(HBaseSerDe.HBASE_COLUMNS_REGEX_MATCHING, true);
 
     if (hbaseColumnsMapping == null) {
       throw new IOException("hbase.columns.mapping required for HBase Table.");
@@ -428,7 +443,7 @@ public class HiveHBaseTableInputFormat e
 
     List<ColumnMapping> columnsMapping = null;
     try {
-      columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping);
+      columnsMapping = HBaseSerDe.parseColumnsMapping(hbaseColumnsMapping, doColumnRegexMatching);
     } catch (SerDeException e) {
       throw new IOException(e);
     }

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseCellMap.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseCellMap.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseCellMap.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseCellMap.java Mon Jul 29 21:08:03 2013
@@ -21,10 +21,11 @@ package org.apache.hadoop.hive.hbase;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.NavigableMap;
 import java.util.Map.Entry;
+import java.util.NavigableMap;
 
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazyMap;
@@ -42,6 +43,7 @@ public class LazyHBaseCellMap extends La
 
   private Result result;
   private byte [] columnFamilyBytes;
+  private byte[] qualPrefix;
   private List<Boolean> binaryStorage;
 
   /**
@@ -54,12 +56,21 @@ public class LazyHBaseCellMap extends La
 
   public void init(
       Result r,
-      byte [] columnFamilyBytes,
+      byte[] columnFamilyBytes,
       List<Boolean> binaryStorage) {
 
+    init(r, columnFamilyBytes, binaryStorage, null);
+  }
+
+  public void init(
+      Result r,
+      byte [] columnFamilyBytes,
+      List<Boolean> binaryStorage, byte[] qualPrefix) {
+
     result = r;
     this.columnFamilyBytes = columnFamilyBytes;
     this.binaryStorage = binaryStorage;
+    this.qualPrefix = qualPrefix;
     setParsed(false);
   }
 
@@ -80,6 +91,12 @@ public class LazyHBaseCellMap extends La
           continue;
         }
 
+        if (qualPrefix != null && !Bytes.startsWith(e.getKey(), qualPrefix)) {
+          // since we were provided a qualifier prefix, only accept qualifiers that start with this
+          // prefix
+          continue;
+        }
+
         LazyMapObjectInspector lazyMoi = getInspector();
 
         // Keys are always primitive

Modified: hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java (original)
+++ hive/branches/tez/hbase-handler/src/java/org/apache/hadoop/hive/hbase/LazyHBaseRow.java Mon Jul 29 21:08:03 2013
@@ -142,9 +142,11 @@ public class LazyHBaseRow extends LazySt
       } else {
         if (colMap.qualifierName == null) {
           // it is a column family
-          // primitive type for Map<Key, Value> can be stored in binary format
+          // primitive type for Map<Key, Value> can be stored in binary format. Pass in the
+          // qualifier prefix to cherry pick the qualifiers that match the prefix instead of picking
+          // up everything
           ((LazyHBaseCellMap) fields[fieldID]).init(
-              result, colMap.familyNameBytes, colMap.binaryStorage);
+              result, colMap.familyNameBytes, colMap.binaryStorage, colMap.qualifierPrefixBytes);
         } else {
           // it is a column i.e. a column-family with column-qualifier
           byte [] res = result.getValue(colMap.familyNameBytes, colMap.qualifierNameBytes);

Modified: hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Mon Jul 29 21:08:03 2013
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.hbase;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -32,6 +33,7 @@ import org.apache.hadoop.hbase.client.Re
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -688,4 +690,123 @@ public class TestHBaseSerDe extends Test
     Put serializedPut = (Put) hbaseSerDe.serialize(row, soi);
     assertEquals("Serialized data: ", p.toString(), serializedPut.toString());
   }
+
+  public void testHBaseSerDeWithColumnPrefixes()
+      throws Exception {
+    byte[] cfa = "cola".getBytes();
+
+    byte[] qualA = "prefixA_col1".getBytes();
+    byte[] qualB = "prefixB_col2".getBytes();
+    byte[] qualC = "prefixB_col3".getBytes();
+    byte[] qualD = "unwanted_col".getBytes();
+
+    List<Object> qualifiers = new ArrayList<Object>();
+    qualifiers.add(new Text("prefixA_col1"));
+    qualifiers.add(new Text("prefixB_col2"));
+    qualifiers.add(new Text("prefixB_col3"));
+    qualifiers.add(new Text("unwanted_col"));
+
+    List<Object> expectedQualifiers = new ArrayList<Object>();
+    expectedQualifiers.add(new Text("prefixA_col1"));
+    expectedQualifiers.add(new Text("prefixB_col2"));
+    expectedQualifiers.add(new Text("prefixB_col3"));
+
+    byte[] rowKey = Bytes.toBytes("test-row1");
+
+    // Data
+    List<KeyValue> kvs = new ArrayList<KeyValue>();
+
+    byte[] dataA = "This is first test data".getBytes();
+    byte[] dataB = "This is second test data".getBytes();
+    byte[] dataC = "This is third test data".getBytes();
+    byte[] dataD = "Unwanted data".getBytes();
+
+    kvs.add(new KeyValue(rowKey, cfa, qualA, dataA));
+    kvs.add(new KeyValue(rowKey, cfa, qualB, dataB));
+    kvs.add(new KeyValue(rowKey, cfa, qualC, dataC));
+    kvs.add(new KeyValue(rowKey, cfa, qualD, dataD));
+
+    Result r = new Result(kvs);
+
+    Put p = new Put(rowKey);
+
+    p.add(new KeyValue(rowKey, cfa, qualA, dataA));
+    p.add(new KeyValue(rowKey, cfa, qualB, dataB));
+    p.add(new KeyValue(rowKey, cfa, qualC, dataC));
+
+    Object[] expectedFieldsData = {
+        new Text("test-row1"),
+        new String("This is first test data"),
+        new String("This is second test data"),
+        new String("This is third test data")};
+
+    int[] expectedMapSize = new int[] {1, 2};
+
+    // Create, initialize, and test the SerDe
+    HBaseSerDe serDe = new HBaseSerDe();
+    Configuration conf = new Configuration();
+    Properties tbl = createPropertiesForColumnPrefixes();
+    serDe.initialize(conf, tbl);
+
+    Object notPresentKey = new Text("unwanted_col");
+
+    deserializeAndSerializeHivePrefixColumnFamily(serDe, r, p, expectedFieldsData, expectedMapSize,
+        expectedQualifiers,
+        notPresentKey);
+  }
+
+  private Properties createPropertiesForColumnPrefixes() {
+    Properties tbl = new Properties();
+    tbl.setProperty(serdeConstants.LIST_COLUMNS,
+        "key,astring,along");
+    tbl.setProperty(serdeConstants.LIST_COLUMN_TYPES,
+        "string:map<string,string>:map<string,string>");
+    tbl.setProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING,
+        ":key,cola:prefixA_.*,cola:prefixB_.*");
+
+    return tbl;
+  }
+
+  private void deserializeAndSerializeHivePrefixColumnFamily(HBaseSerDe serDe, Result r, Put p,
+      Object[] expectedFieldsData, int[] expectedMapSize, List<Object> expectedQualifiers,
+      Object notPresentKey)
+      throws SerDeException, IOException {
+    StructObjectInspector soi = (StructObjectInspector) serDe.getObjectInspector();
+
+    List<? extends StructField> fieldRefs = soi.getAllStructFieldRefs();
+
+    Object row = serDe.deserialize(r);
+
+    int j = 0;
+
+    for (int i = 0; i < fieldRefs.size(); i++) {
+      Object fieldData = soi.getStructFieldData(row, fieldRefs.get(i));
+      assertNotNull(fieldData);
+
+      if (fieldData instanceof LazyPrimitive<?, ?>) {
+        assertEquals(expectedFieldsData[i], ((LazyPrimitive<?, ?>) fieldData).getWritableObject());
+      } else if (fieldData instanceof LazyHBaseCellMap) {
+        assertEquals(expectedFieldsData[i], ((LazyHBaseCellMap) fieldData)
+            .getMapValueElement(expectedQualifiers.get(j)).toString().trim());
+
+        assertEquals(expectedMapSize[j], ((LazyHBaseCellMap) fieldData).getMapSize());
+        // Make sure that the unwanted key is not present in the map
+        assertNull(((LazyHBaseCellMap) fieldData).getMapValueElement(notPresentKey));
+
+        j++;
+
+      } else {
+        fail("Error: field data not an instance of LazyPrimitive<?, ?> or LazyHBaseCellMap");
+      }
+    }
+
+    SerDeUtils.getJSONString(row, soi);
+
+    // Now serialize
+    Put put = (Put) serDe.serialize(row, soi);
+
+    if (p != null) {
+      assertEquals("Serialized put:", p.toString(), put.toString());
+    }
+  }
 }

Modified: hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java (original)
+++ hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestLazyHBaseObject.java Mon Jul 29 21:08:03 2013
@@ -57,8 +57,9 @@ import org.apache.hadoop.io.Writable;
 public class TestLazyHBaseObject extends TestCase {
   /**
    * Test the LazyMap class with Integer-to-String.
+   * @throws SerDeException
    */
-  public void testLazyHBaseCellMap1() {
+  public void testLazyHBaseCellMap1() throws SerDeException {
     // Map of Integer to String
     Text nullSequence = new Text("\\N");
     ObjectInspector oi = LazyFactory.createLazyObjectInspector(
@@ -118,8 +119,9 @@ public class TestLazyHBaseObject extends
 
   /**
    * Test the LazyMap class with String-to-String.
+   * @throws SerDeException
    */
-  public void testLazyHBaseCellMap2() {
+  public void testLazyHBaseCellMap2() throws SerDeException {
     // Map of String to String
     Text nullSequence = new Text("\\N");
     ObjectInspector oi = LazyFactory.createLazyObjectInspector(
@@ -180,8 +182,9 @@ public class TestLazyHBaseObject extends
   /**
    * Test the LazyHBaseCellMap class for the case where both the key and the value in the family
    * map are stored in binary format using the appropriate LazyPrimitive objects.
+   * @throws SerDeException
    */
-  public void testLazyHBaseCellMap3() {
+  public void testLazyHBaseCellMap3() throws SerDeException {
 
     Text nullSequence = new Text("\\N");
     TypeInfo mapBinaryIntKeyValue = TypeInfoUtils.getTypeInfoFromTypeString("map<int,int>");
@@ -450,8 +453,9 @@ public class TestLazyHBaseObject extends
   /**
    * Test the LazyHBaseRow class with one-for-one mappings between
    * Hive fields and HBase columns.
+   * @throws SerDeException
    */
-  public void testLazyHBaseRow1() {
+  public void testLazyHBaseRow1() throws SerDeException {
     List<TypeInfo> fieldTypeInfos =
       TypeInfoUtils.getTypeInfosFromTypeString(
           "string,int,array<string>,map<string,string>,string");
@@ -573,8 +577,9 @@ public class TestLazyHBaseObject extends
   /**
    * Test the LazyHBaseRow class with a mapping from a Hive field to
    * an HBase column family.
+   * @throws SerDeException
    */
-  public void testLazyHBaseRow2() {
+  public void testLazyHBaseRow2() throws SerDeException {
     // column family is mapped to Map<string,string>
     List<TypeInfo> fieldTypeInfos =
       TypeInfoUtils.getTypeInfosFromTypeString(
@@ -695,8 +700,9 @@ public class TestLazyHBaseObject extends
    * Test the LazyHBaseRow class with a one-to-one/onto mapping between Hive columns and
    * HBase column family/column qualifier pairs. The column types are primitive and fields
    * are stored in binary format in HBase.
+   * @throws SerDeException
    */
-  public void testLazyHBaseRow3() {
+  public void testLazyHBaseRow3() throws SerDeException {
 
     List<TypeInfo> fieldTypeInfos = TypeInfoUtils.getTypeInfosFromTypeString(
         "string,int,tinyint,smallint,bigint,float,double,string,boolean");

Modified: hive/branches/tez/hcatalog/bin/hcat
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/bin/hcat?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/bin/hcat (original)
+++ hive/branches/tez/hcatalog/bin/hcat Mon Jul 29 21:08:03 2013
@@ -34,8 +34,14 @@ done                                    
 bin=`dirname "$this"`                                                            
 script=`basename "$this"`                                                        
 bin=`unset CDPATH; cd "$bin"; pwd`                                               
-this="$bin/$script"                                                              
-
+this="$bin/$script"
+#to preserve value of 'this' since any other file that defines 'this' and is sourced
+#here (e.g. hcat-config.sh) will overwrite it
+this_hcat=$this
+
+function echoerr() {
+    echo "${this_hcat}: $@" 1>&2
+}
 
 if [ -e "$bin/../libexec/hcat-config.sh" ]; then
   . "$bin"/../libexec/hcat-config.sh
@@ -60,7 +66,8 @@ done
 
 # check for hive in the path
 HIVE_IN_PATH=`which hive 2>/dev/null`
-if [ -f ${HIVE_IN_PATH} ]; then
+# quote the expansion: unquoted and empty it collapses to the one-argument test [ -f ] / [ -n ], which is true
+if [ -n "${HIVE_IN_PATH}" ]; then
   #dir of hive scrip
   HIVE_DIR=`dirname "$HIVE_IN_PATH"`
   #one level up for base dir
@@ -70,8 +77,14 @@ fi
 # HIVE_HOME env variable overrides hive in the path
 HIVE_HOME=${HIVE_HOME:-$HIVE_DIR}
 
+#if hive is not in path and not set by env, set it to default in build tree
+if [ -z "${HIVE_HOME}" ]; then
+  HIVE_HOME="${bin}/../.."
+  echoerr "HIVE_HOME is not defined; assuming ${HIVE_HOME}";
+fi
+
 if [ "$HIVE_HOME" == "" ]; then
-  echo "Cannot find hive installation: \$HIVE_HOME must be set or hive must be in the path";
+  echo "${this_hcat}: Cannot find hive installation: \$HIVE_HOME must be set or hive must be in the path";
   exit 4;
 fi
 
@@ -87,13 +100,13 @@ fi
 
 HIVE_LIB_DIR=${HIVE_HOME}/lib
 if [ ! -d "$HIVE_LIB_DIR" ]; then
-  echo "Cannot find lib dir within HIVE_HOME : $HIVE_LIB_DIR";
+  echo "${this_hcat}: Cannot find lib dir within HIVE_HOME : $HIVE_LIB_DIR";
   exit 4;
 fi
 
 HIVE_CONF_DIR=${HIVE_CONF_DIR:-$HIVE_HOME/conf}
 if [ ! -d "$HIVE_CONF_DIR" ]; then
-  echo "Cannot find conf dir within HIVE_HOME : $HIVE_CONF_DIR";
+  echo "${this_hcat}: Cannot find conf dir within HIVE_HOME : $HIVE_CONF_DIR";
   exit 4;
 fi
 

Modified: hive/branches/tez/hcatalog/build-support/ant/checkstyle.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build-support/ant/checkstyle.xml?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build-support/ant/checkstyle.xml (original)
+++ hive/branches/tez/hcatalog/build-support/ant/checkstyle.xml Mon Jul 29 21:08:03 2013
@@ -61,7 +61,11 @@
           <exclude name="src/test/e2e/hcatalog/testdist/**"/> <!-- Test run results --> 
           <exclude name="src/test/e2e/hcatalog/tar/**"/> <!-- Test build area --> 
           <exclude name="src/test/e2e/hcatalog/udfs/java/*.jar"/> <!-- Test build area --> 
-          <exclude name="src/test/e2e/hcatalog/hcattests.tar"/> <!-- Test build artifact --> 
+          <exclude name="src/test/e2e/hcatalog/hcattests.tar"/> <!-- Test build artifact -->
+          <exclude name="**/*.iml"/><!--intelliJ files-->
+          <exclude name="src/test/e2e/templeton/testdist/**"/> <!-- Test run results -->
+          <exclude name="src/test/e2e/templeton/tar/**"/> <!-- Test build area -->
+          <exclude name="src/test/e2e/templeton/hcattests.tar"/> <!-- Test build artifact -->
       </fileset>
       <formatter type="plain"/>
       <formatter type="xml" toFile="${build.dir}/checkstyle/checkstyle_result.xml"/>

Modified: hive/branches/tez/hcatalog/build-support/ant/deploy.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build-support/ant/deploy.xml?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build-support/ant/deploy.xml (original)
+++ hive/branches/tez/hcatalog/build-support/ant/deploy.xml Mon Jul 29 21:08:03 2013
@@ -69,7 +69,7 @@
       <_mvnpublish module="testutils" />
     </target>
 
-    <target name="mvn-init" unless="mvn-init.complete">
+    <target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasks jar and deploy all Hive jars to local Maven repo">
         <echo message="${ant.project.name}"/>
         <get src="${mvnrepo}/org/apache/maven/maven-ant-tasks/${maven-ant-tasks.version}/maven-ant-tasks-${maven-ant-tasks.version}.jar"
              dest="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"

Modified: hive/branches/tez/hcatalog/build-support/ant/test.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build-support/ant/test.xml?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build-support/ant/test.xml (original)
+++ hive/branches/tez/hcatalog/build-support/ant/test.xml Mon Jul 29 21:08:03 2013
@@ -47,6 +47,9 @@
         <sysproperty key="hive.metastore.warehouse.dir" value="${test.warehouse.dir}"/>
         <sysproperty key="java.security.krb5.realm" value=""/> <!-- HADOOP-7489 -->
         <sysproperty key="java.security.krb5.kdc" value=""/> <!-- HADOOP-7489 -->
+        <!--HCAT_PREFIX, HIVE_HOME are needed by WebHCat tests-->
+        <env key="HCAT_PREFIX" value="${env.HCAT_PREFIX}"/>
+        <env key="HIVE_HOME" value="${env.HIVE_HOME}"/>
         <classpath>
           <path refid="test.class.path"/>
           <pathelement location="${clover.jar}"/>
@@ -62,6 +65,12 @@
           <enable/>
         </assertions>
       </junit>
+      <copy todir="${test.result.dir}">
+        <!--make sure hive's 'ant testreport' includes them-->
+        <fileset dir="${test.logs}">
+          <include name="**/TEST-*.xml"/>
+        </fileset>
+      </copy>
       <fail if="tests.failed">Tests failed!</fail>
     </sequential>
   </macrodef>

Modified: hive/branches/tez/hcatalog/build.properties
URL: http://svn.apache.org/viewvc/hive/branches/tez/hcatalog/build.properties?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/hcatalog/build.properties (original)
+++ hive/branches/tez/hcatalog/build.properties Mon Jul 29 21:08:03 2013
@@ -39,7 +39,7 @@ test.timeout=2700000
 test.warehouse.dir=${test.dir}/hcat_junit_warehouse
 mvnrepo=http://repo2.maven.org/maven2
 test.src.dir=${basedir}/src/test
-test.junit.output.format=plain
+test.junit.output.format=xml
 test.output=no
 test.excludes=e2e/**
 clover.jar=${clover.home}/lib/clover.jar