Posted to commits@hive.apache.org by cw...@apache.org on 2011/02/01 07:33:49 UTC

svn commit: r1065920 - in /hive/trunk: ./ hbase-handler/src/java/org/apache/hadoop/hive/hbase/ hbase-handler/src/test/org/apache/hadoop/hive/hbase/ hbase-handler/src/test/queries/ hbase-handler/src/test/results/ ivy/ lib/ ql/ ql/src/test/org/apache/had...

Author: cws
Date: Tue Feb  1 06:33:48 2011
New Revision: 1065920

URL: http://svn.apache.org/viewvc?rev=1065920&view=rev
Log:
HIVE-1235 use Ivy for fetching HBase dependencies (John Sichi via cws)

Removed:
    hive/trunk/lib/hbase-0.20.3-test.jar
    hive/trunk/lib/hbase-0.20.3.jar
    hive/trunk/lib/zookeeper-3.2.2.jar
Modified:
    hive/trunk/CHANGES.txt
    hive/trunk/build-common.xml
    hive/trunk/build.xml
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
    hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
    hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
    hive/trunk/hbase-handler/src/test/queries/hbase_bulk.m
    hive/trunk/hbase-handler/src/test/results/hbase_bulk.m.out
    hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out
    hive/trunk/ivy/ivysettings.xml
    hive/trunk/ivy/libraries.properties
    hive/trunk/ql/build.xml
    hive/trunk/ql/ivy.xml
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

Modified: hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hive/trunk/CHANGES.txt?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/CHANGES.txt (original)
+++ hive/trunk/CHANGES.txt Tue Feb  1 06:33:48 2011
@@ -171,6 +171,8 @@ Trunk -  Unreleased
 
   IMPROVEMENTS
 
+    HIVE-1235 use Ivy for fetching HBase dependencies (John Sichi via cws)
+
     HIVE-1931 Improve the implementation of the METASTORE_CACHE_PINOBJTYPES config (Mac Yang via cws)
 
     HIVE-1692. FetchOperator.getInputFormatFromCache hides causal exception (Philip Zeyliger via cws)

Modified: hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Tue Feb  1 06:33:48 2011
@@ -105,7 +105,7 @@
   </condition>
 
   <!--this is the naming policy for artifacts we want pulled down-->
-  <property name="ivy.artifact.retrieve.pattern" value="[conf]/[artifact]-[revision].[ext]"/>
+  <property name="ivy.artifact.retrieve.pattern" value="[conf]/[artifact]-[revision](-[classifier]).[ext]"/>
 
   <target name="ivy-init-dirs">
     <mkdir dir="${build.ivy.dir}" />
@@ -272,7 +272,7 @@
 
   <target name="setup"/>
 
-  <target name="compile" depends="init, install-hadoopcore, setup">
+  <target name="compile" depends="init, setup">
     <echo message="Compiling: ${ant.project.name}"/>
     <javac
      encoding="${build.encoding}"

Modified: hive/trunk/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Tue Feb  1 06:33:48 2011
@@ -53,16 +53,6 @@
   <property name="checkstyle.build.dir" location="${build.dir.hive}/checkstyle"/>
   <property name="md5sum.format" value="{0}  {1}"/>
 
-  <!-- hbase-handler is enabled by default only for Hadoop 0.20.x -->
-  <condition property="hbase.enabled" value="true" else="false">
-    <matches string="${hadoop.version.ant-internal}" pattern="^0\.20\..*" />
-  </condition>
-
-  <!-- only iterate over hbase-handler when enabled -->
-  <condition property="hbase.iterate" value=",hbase-handler/build.xml" else="">
-    <istrue value="${hbase.enabled}"/>
-  </condition>
-
   <condition property="is-offline" value="true" else="false">
     <isset property="offline"/>
   </condition>
@@ -121,7 +111,7 @@
       <subant target="@{target}">
         <property name="build.dir.hive" location="${build.dir.hive}"/>
         <property name="is-offline" value="${is-offline}"/>
-        <filelist dir="." files="ant/build.xml,shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml${hbase.iterate},ant/build.xml"/>
+        <filelist dir="." files="ant/build.xml,shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml,ant/build.xml"/>
       </subant>
     </sequential>
   </macrodef>
@@ -132,7 +122,7 @@
       <subant target="@{target}">
         <property name="build.dir.hive" location="${build.dir.hive}"/>
         <property name="is-offline" value="${is-offline}"/>
-        <filelist dir="." files="common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml${hbase.iterate}"/>
+        <filelist dir="." files="common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml"/>
       </subant>
     </sequential>
   </macrodef>
@@ -143,7 +133,7 @@
       <subant target="@{target}">
         <property name="build.dir.hive" location="${build.dir.hive}"/>
         <property name="is-offline" value="${is-offline}"/>
-        <filelist dir="." files="shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml${hbase.iterate}"/>
+        <filelist dir="." files="shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,cli/build.xml,contrib/build.xml,service/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml"/>
       </subant>
     </sequential>
   </macrodef>
@@ -660,5 +650,5 @@
     <copy file="${docs.src}/site.css" tofile="${anakia.docs.dest}/site.css" />
   </target>
 
-  
+
 </project>

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Tue Feb  1 06:33:48 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HBaseConf
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -72,6 +73,8 @@ public class HBaseStorageHandler extends
       return admin;
     } catch (MasterNotRunningException mnre) {
       throw new MetaException(StringUtils.stringifyException(mnre));
+    } catch (ZooKeeperConnectionException zkce) {
+      throw new MetaException(StringUtils.stringifyException(zkce));
     }
   }
 
@@ -295,7 +298,7 @@ public class HBaseStorageHandler extends
       // this better later when we support more interesting predicates.
       return null;
     }
-    
+
     DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
     decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(
       searchConditions);
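
For context on the new catch clause: in HBase 0.89 the HBaseAdmin
constructor declares ZooKeeperConnectionException in addition to
MasterNotRunningException, so both must be handled. A minimal sketch of
the resulting method shape, assuming a lazily created admin field and a
hbaseConf field (only the fragments above are from the actual file):

    import org.apache.hadoop.hbase.MasterNotRunningException;
    import org.apache.hadoop.hbase.ZooKeeperConnectionException;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.util.StringUtils;

    // Sketch: both checked connection failures are rewrapped as
    // MetaException so metastore callers see a single exception type.
    private HBaseAdmin getHBaseAdmin() throws MetaException {
      try {
        if (admin == null) {
          admin = new HBaseAdmin(hbaseConf); // may fail reaching master or ZK
        }
        return admin;
      } catch (MasterNotRunningException mnre) {
        throw new MetaException(StringUtils.stringifyException(mnre));
      } catch (ZooKeeperConnectionException zkce) {
        throw new MetaException(StringUtils.stringifyException(zkce));
      }
    }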

Modified: hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java (original)
+++ hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseTestSetup.java Tue Feb  1 06:33:48 2011
@@ -26,7 +26,7 @@ import junit.framework.Test;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.MiniZooKeeperCluster;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
 import org.apache.hadoop.hbase.client.HConnectionManager;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hive.conf.HiveConf;
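
Note on the import change: MiniZooKeeperCluster moved into the
org.apache.hadoop.hbase.zookeeper package as of HBase 0.89, hence this
fix and the matching one in QTestUtil.java below. A minimal usage
sketch under the new package (the base directory is hypothetical, and
checked exceptions are omitted):

    import java.io.File;
    import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;

    MiniZooKeeperCluster zooKeeperCluster = new MiniZooKeeperCluster();
    // startup() launches a standalone ZooKeeper and returns its client port
    int clientPort = zooKeeperCluster.startup(new File("/tmp/zk-unit-test"));
    // ... point the HBase/Hive test conf at localhost:clientPort ...
    zooKeeperCluster.shutdown();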

Modified: hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java (original)
+++ hive/trunk/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseSerDe.java Tue Feb  1 06:33:48 2011
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.hbase;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.Properties;
 
@@ -84,6 +85,7 @@ public class TestHBaseSerDe extends Test
     kvs.add(new KeyValue(rowKey, cfc, qualDouble, Bytes.toBytes("5.3")));
     kvs.add(new KeyValue(rowKey, cfa, qualString, Bytes.toBytes("Hadoop, HBase, and Hive")));
     kvs.add(new KeyValue(rowKey, cfb, qualBool, Bytes.toBytes("true")));
+    Collections.sort(kvs, KeyValue.COMPARATOR);
 
     Result r = new Result(kvs);
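
The added sort matters because the newer HBase client expects the
KeyValues backing a Result to be in KeyValue.COMPARATOR order (row,
then family, then qualifier); this test builds the list by hand across
three column families, so it must sort before wrapping. A condensed
sketch of the pattern, with illustrative row/family/qualifier names:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    byte[] row = Bytes.toBytes("row1");
    byte[] cf = Bytes.toBytes("cf");
    List<KeyValue> kvs = new ArrayList<KeyValue>();
    kvs.add(new KeyValue(row, cf, Bytes.toBytes("b"), Bytes.toBytes("2")));
    kvs.add(new KeyValue(row, cf, Bytes.toBytes("a"), Bytes.toBytes("1")));
    // Result assumes comparator order; sort hand-built lists first.
    Collections.sort(kvs, KeyValue.COMPARATOR);
    Result r = new Result(kvs);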
 

Modified: hive/trunk/hbase-handler/src/test/queries/hbase_bulk.m
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/queries/hbase_bulk.m?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/queries/hbase_bulk.m (original)
+++ hive/trunk/hbase-handler/src/test/queries/hbase_bulk.m Tue Feb  1 06:33:48 2011
@@ -11,17 +11,16 @@ TBLPROPERTIES ('hfile.family.path' = '/t
 
 -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created
-create external table hbpartition(part_break string)
-row format serde 
+create table hbpartition(part_break string)
+row format serde
 'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
-stored as 
-inputformat 
+stored as
+inputformat
 'org.apache.hadoop.mapred.TextInputFormat'
-outputformat 
-'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
-location '/tmp/hbpartitions';
+outputformat
+'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat';
 
--- this should produce one file in /tmp/hbpartitions, but we do not
+-- this should produce one file, but we do not
 -- know what it will be called, so we will copy it to a well known
 -- filename /tmp/hbpartition.lst
 insert overwrite table hbpartition
@@ -29,8 +28,8 @@ select distinct value
 from src
 where value='val_100' or value='val_200';
 
-dfs -count /tmp/hbpartitions;
-dfs -cp /tmp/hbpartitions/* /tmp/hbpartition.lst;
+dfs -count /build/ql/test/data/warehouse/hbpartition;
+dfs -cp /build/ql/test/data/warehouse/hbpartition/* /tmp/hbpartition.lst;
 
 set mapred.reduce.tasks=3;
 set hive.mapred.partitioner=org.apache.hadoop.mapred.lib.TotalOrderPartitioner;

Modified: hive/trunk/hbase-handler/src/test/results/hbase_bulk.m.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/hbase_bulk.m.out?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/hbase_bulk.m.out (original)
+++ hive/trunk/hbase-handler/src/test/results/hbase_bulk.m.out Tue Feb  1 06:33:48 2011
@@ -25,30 +25,28 @@ POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@hbsort
 PREHOOK: query: -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created
-create external table hbpartition(part_break string)
-row format serde 
+create table hbpartition(part_break string)
+row format serde
 'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
-stored as 
-inputformat 
+stored as
+inputformat
 'org.apache.hadoop.mapred.TextInputFormat'
-outputformat 
+outputformat
 'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
-location '/tmp/hbpartitions'
 PREHOOK: type: CREATETABLE
 POSTHOOK: query: -- this is a dummy table used for controlling how the input file
 -- for TotalOrderPartitioner is created
-create external table hbpartition(part_break string)
-row format serde 
+create table hbpartition(part_break string)
+row format serde
 'org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe'
-stored as 
-inputformat 
+stored as
+inputformat
 'org.apache.hadoop.mapred.TextInputFormat'
-outputformat 
+outputformat
 'org.apache.hadoop.hive.ql.io.HiveNullValueSequenceFileOutputFormat'
-location '/tmp/hbpartitions'
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@hbpartition
-PREHOOK: query: -- this should produce one file in /tmp/hbpartitions, but we do not
+PREHOOK: query: -- this should produce one file, but we do not
 -- know what it will be called, so we will copy it to a well known
 -- filename /tmp/hbpartition.lst
 insert overwrite table hbpartition
@@ -58,7 +56,7 @@ where value='val_100' or value='val_200'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@hbpartition
-POSTHOOK: query: -- this should produce one file in /tmp/hbpartitions, but we do not
+POSTHOOK: query: -- this should produce one file, but we do not
 -- know what it will be called, so we will copy it to a well known
 -- filename /tmp/hbpartition.lst
 insert overwrite table hbpartition
@@ -69,7 +67,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@hbpartition
 POSTHOOK: Lineage: hbpartition.part_break SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
-           1            1                139 hdfs://localhost.localdomain:33778/tmp/hbpartitions
+           1            1                139 hdfs://localhost.localdomain:37780/build/ql/test/data/warehouse/hbpartition
 PREHOOK: query: -- this should produce three files in /tmp/hbsort/cf
 -- include some trailing blanks and nulls to make sure we handle them correctly
 insert overwrite table hbsort
@@ -98,7 +96,7 @@ POSTHOOK: Lineage: hbpartition.part_brea
 POSTHOOK: Lineage: hbsort.key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 POSTHOOK: Lineage: hbsort.val EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: hbsort.val2 EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
-           1            3              23227 hdfs://localhost.localdomain:33778/tmp/hbsort/cf
+           1            3              23380 hdfs://localhost.localdomain:37780/tmp/hbsort/cf
 PREHOOK: query: -- To get the files out to your local filesystem for loading into
 -- HBase, run mkdir -p /tmp/blah/cf, then uncomment and
 -- semicolon-terminate the line below before running this test:

Modified: hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out (original)
+++ hive/trunk/hbase-handler/src/test/results/hbase_stats.q.out Tue Feb  1 06:33:48 2011
@@ -14,11 +14,11 @@ POSTHOOK: Output: default@stats_src
 POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: analyze table stats_src compute statistics
-PREHOOK: type: null
+PREHOOK: type: QUERY
 PREHOOK: Input: default@stats_src
 PREHOOK: Output: default@stats_src
 POSTHOOK: query: analyze table stats_src compute statistics
-POSTHOOK: type: null
+POSTHOOK: type: QUERY
 POSTHOOK: Input: default@stats_src
 POSTHOOK: Output: default@stats_src
 POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -37,18 +37,14 @@ value               	string             
 # Detailed Table Information	 	 
 Database:           	default             	 
 Owner:              	null                	 
-CreateTime:         	Sun Oct 17 21:42:31 PDT 2010	 
+CreateTime:         	Tue Jan 25 14:48:20 PST 2011	 
 LastAccessTime:     	UNKNOWN             	 
 Protect Mode:       	None                	 
 Retention:          	0                   	 
-Location:           	pfile:/data/users/njain/hive_commit1/hive_commit1/build/hbase-handler/test/data/warehouse/stats_src	 
+Location:           	pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/stats_src	 
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	numFiles            	1                   
-	numPartitions       	0                   
-	numRows             	500                 
-	totalSize           	5812                
-	transient_lastDdlTime	1287376963          
+	transient_lastDdlTime	1295995714          
 	 	 
 # Storage Information	 	 
 SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
@@ -94,11 +90,11 @@ POSTHOOK: Lineage: hbase_part PARTITION(
 POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics
-PREHOOK: type: null
+PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_part@ds=2008-04-08/hr=11
 PREHOOK: Output: default@hbase_part
 POSTHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=11) compute statistics
-POSTHOOK: type: null
+POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_part@ds=2008-04-08/hr=11
 POSTHOOK: Output: default@hbase_part
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -108,11 +104,11 @@ POSTHOOK: Lineage: hbase_part PARTITION(
 POSTHOOK: Lineage: stats_src.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: stats_src.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics
-PREHOOK: type: null
+PREHOOK: type: QUERY
 PREHOOK: Input: default@hbase_part@ds=2008-04-08/hr=12
 PREHOOK: Output: default@hbase_part
 POSTHOOK: query: analyze table hbase_part partition(ds='2008-04-08', hr=12) compute statistics
-POSTHOOK: type: null
+POSTHOOK: type: QUERY
 POSTHOOK: Input: default@hbase_part@ds=2008-04-08/hr=12
 POSTHOOK: Output: default@hbase_part
 POSTHOOK: Lineage: hbase_part PARTITION(ds=2010-04-08,hr=11).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
@@ -145,18 +141,14 @@ hr                  	string             
 # Detailed Table Information	 	 
 Database:           	default             	 
 Owner:              	null                	 
-CreateTime:         	Sun Oct 17 21:42:44 PDT 2010	 
+CreateTime:         	Tue Jan 25 14:49:11 PST 2011	 
 LastAccessTime:     	UNKNOWN             	 
 Protect Mode:       	None                	 
 Retention:          	0                   	 
-Location:           	pfile:/data/users/njain/hive_commit1/hive_commit1/build/hbase-handler/test/data/warehouse/hbase_part	 
+Location:           	pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_part	 
 Table Type:         	MANAGED_TABLE       	 
 Table Parameters:	 	 
-	numFiles            	2                   
-	numPartitions       	2                   
-	numRows             	1000                
-	totalSize           	11624               
-	transient_lastDdlTime	1287376983          
+	transient_lastDdlTime	1295995751          
 	 	 
 # Storage Information	 	 
 SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
@@ -193,15 +185,12 @@ hr                  	string             
 Partition Value:    	[2010-04-08, 11]    	 
 Database:           	default             	 
 Table:              	hbase_part          	 
-CreateTime:         	Sun Oct 17 21:42:49 PDT 2010	 
+CreateTime:         	Tue Jan 25 14:49:26 PST 2011	 
 LastAccessTime:     	UNKNOWN             	 
 Protect Mode:       	None                	 
-Location:           	pfile:/data/users/njain/hive_commit1/hive_commit1/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=11	 
+Location:           	pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=11	 
 Partition Parameters:	 	 
-	numFiles            	1                   
-	numRows             	500                 
-	totalSize           	5812                
-	transient_lastDdlTime	1287376969          
+	transient_lastDdlTime	1295995766          
 	 	 
 # Storage Information	 	 
 SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
@@ -238,15 +227,12 @@ hr                  	string             
 Partition Value:    	[2010-04-08, 12]    	 
 Database:           	default             	 
 Table:              	hbase_part          	 
-CreateTime:         	Sun Oct 17 21:42:54 PDT 2010	 
+CreateTime:         	Tue Jan 25 14:49:52 PST 2011	 
 LastAccessTime:     	UNKNOWN             	 
 Protect Mode:       	None                	 
-Location:           	pfile:/data/users/njain/hive_commit1/hive_commit1/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=12	 
+Location:           	pfile:/data/users/jsichi/open/hive-trunk/build/hbase-handler/test/data/warehouse/hbase_part/ds=2010-04-08/hr=12	 
 Partition Parameters:	 	 
-	numFiles            	1                   
-	numRows             	500                 
-	totalSize           	5812                
-	transient_lastDdlTime	1287376974          
+	transient_lastDdlTime	1295995792          
 	 	 
 # Storage Information	 	 
 SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 

Modified: hive/trunk/ivy/ivysettings.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ivy/ivysettings.xml?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/ivy/ivysettings.xml (original)
+++ hive/trunk/ivy/ivysettings.xml Tue Feb  1 06:33:48 2011
@@ -55,17 +55,17 @@
        <artifact pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].[ext]"/>
        <ivy pattern="${repo.dir}/org/apache/hadoop/[module]/[revision]/[module]-[revision].pom"/>
     </filesystem>
-    
+
     <chain name="default" dual="true" checkmodified="true" changingPattern=".*SNAPSHOT">
       <resolver ref="hadoop-source"/>
-      <resolver ref="apache-snapshot"/> 
+      <resolver ref="apache-snapshot"/>
       <resolver ref="maven2"/>
       <resolver ref="datanucleus-repo"/>
     </chain>
 
     <chain name="internal" dual="true">
       <resolver ref="fs"/>
-      <resolver ref="apache-snapshot"/> 
+      <resolver ref="apache-snapshot"/>
       <resolver ref="maven2"/>
       <resolver ref="datanucleus-repo"/>
     </chain>
@@ -74,9 +74,9 @@
       <resolver ref="maven2"/>
       <resolver ref="datanucleus-repo"/>
     </chain>
-    
+
   </resolvers>
-  
+
   <modules>
      <module organisation="org.apache.hadoop" name="hadoop-*" resolver="${resolvers}"/>
   </modules>

Modified: hive/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hive/trunk/ivy/libraries.properties?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/ivy/libraries.properties (original)
+++ hive/trunk/ivy/libraries.properties Tue Feb  1 06:33:48 2011
@@ -35,8 +35,8 @@ commons-lang.version=2.4
 commons-logging.version=1.0.4
 commons-logging-api.version=1.0.4
 commons-pool.version=1.5.4
-hbase.version=0.20.3
-hbase-test.version=0.20.3
+hbase.version=0.89.0-SNAPSHOT
+hbase-test.version=0.89.0-SNAPSHOT
 jdo-api.version=2.3-ec
 jdom.version=1.1
 jline.version=0.9.94
@@ -47,4 +47,5 @@ slf4j-api.version=1.6.1
 slf4j-log4j12.version=1.6.1
 thrift.version=0.5.0
 thrift-fb303.version=0.5.0
-zookeeper.version=3.2.2
+zookeeper.version=3.3.1
+guava.version=r06

Modified: hive/trunk/ql/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/build.xml?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/ql/build.xml (original)
+++ hive/trunk/ql/build.xml Tue Feb  1 06:33:48 2011
@@ -148,7 +148,7 @@
     <mkdir dir="${build.dir}/gen/antlr/gen-java/org/apache/hadoop/hive/ql/parse"/>
   </target>
 
-  <target name="compile" depends="init, ql-init, build-grammar">
+  <target name="compile" depends="init, ql-init, build-grammar, ivy-retrieve">
     <echo message="Compiling: ${name}"/>
     <javac
      encoding="${build.encoding}"

Modified: hive/trunk/ql/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/ivy.xml?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/ql/ivy.xml (original)
+++ hive/trunk/ql/ivy.xml Tue Feb  1 06:33:48 2011
@@ -1,4 +1,4 @@
-<ivy-module version="2.0">
+<ivy-module version="2.0" xmlns:m="http://ant.apache.org/ivy/maven">
     <info organisation="org.apache.hadoop.hive" module="ql"/>
     <dependencies>
         <dependency org="hadoop" name="core" rev="${hadoop.version.ant-internal}">
@@ -6,5 +6,17 @@
         </dependency>
         <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j-api.version}"/>
         <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j-log4j12.version}"/>
+        <dependency org="org.apache.hbase" name="hbase" rev="${hbase.version}"
+                    transitive="false">
+          <artifact name="hbase" type="jar"/>
+          <artifact name="hbase" type="test-jar" ext="jar"
+                    m:classifier="tests"/>
+        </dependency>
+        <dependency org="org.apache.zookeeper" name="zookeeper"
+                    rev="${zookeeper.version}" transitive="false">
+          <include type="jar"/>
+        </dependency>
+        <dependency org="com.google.guava" name="guava" rev="${guava.version}"
+                    transitive="false"/>
     </dependencies>
 </ivy-module>

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1065920&r1=1065919&r2=1065920&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Tue Feb  1 06:33:48 2011
@@ -47,7 +47,7 @@ import junit.framework.Test;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.MiniZooKeeperCluster;
+import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
 import org.apache.hadoop.hive.cli.CliDriver;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -338,7 +338,7 @@ public class QTestUtil {
       }
     }
     db.setCurrentDatabase(DEFAULT_DATABASE_NAME);
-    
+
     List<String> roleNames = db.getAllRoleNames();
       for (String roleName : roleNames) {
         db.dropRole(roleName);
@@ -395,9 +395,9 @@ public class QTestUtil {
   }
 
   public void createSources() throws Exception {
-    
+
     startSessionState();
-    
+
     // Create a bunch of tables with columns key and value
     LinkedList<String> cols = new LinkedList<String>();
     cols.add("key");
@@ -499,7 +499,7 @@ public class QTestUtil {
     testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
     // conf.logVars(System.out);
     // System.out.flush();
-    
+
     SessionState.start(conf);
     db = Hive.get(conf);
     fs = FileSystem.get(conf);
@@ -580,7 +580,7 @@ public class QTestUtil {
 
   private CliSessionState startSessionState()
       throws FileNotFoundException, UnsupportedEncodingException {
-    
+
     HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
         "org.apache.hadoop.hive.ql.security.DummyAuthenticator");