Posted to commits@hive.apache.org by cw...@apache.org on 2012/03/16 03:35:07 UTC

svn commit: r1301315 - in /hive/trunk/hbase-handler/src: java/org/apache/hadoop/hive/hbase/ test/queries/ test/results/

Author: cws
Date: Fri Mar 16 02:35:07 2012
New Revision: 1301315

URL: http://svn.apache.org/viewvc?rev=1301315&view=rev
Log:
HIVE-2815 [jira] Filter pushdown in hbase for keys stored in binary format
(Ashutosh Chauhan via Carl Steinbach)

Summary:
Further support for pushdown on keys stored in binary format

This patch enables filter pushdown for keys stored in binary format in HBase.

Test Plan: Included a new test case.

Reviewers: JIRA, jsichi, njain, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D1875
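
For a concrete sense of what gets pushed down: in the new test case below, int_col
is mapped to the binary-encoded row key, so a predicate such as int_col > 0 can be
turned into an HBase scan range instead of a full-table scan. A minimal sketch of
the resulting range, paraphrasing the convertFilter() change in
HiveHBaseTableInputFormat.java below (Bytes is org.apache.hadoop.hbase.util.Bytes;
illustrative, not the committed code verbatim):

    // key > 0, with the key stored in binary format
    byte[] constantVal = Bytes.toBytes(0);        // binary encoding of the constant 0
    byte[] startRow    = getNextBA(constantVal);  // '>' excludes the constant itself
    byte[] stopRow     = HConstants.EMPTY_END_ROW;

Note the range may be a superset of the matching rows (e.g. negative binary-encoded
ints sort after positive ones in HBase's unsigned byte order); rows that slip
through are still removed by Hive's residual Filter Operator, which the explain
plan in the new test output retains.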

Added:
    hive/trunk/hbase-handler/src/test/queries/external_table_ppd.q
    hive/trunk/hbase-handler/src/test/results/external_table_ppd.q.out
Modified:
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
    hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java?rev=1301315&r1=1301314&r2=1301315&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java Fri Mar 16 02:35:07 2012
@@ -788,6 +788,10 @@ public class HBaseSerDe implements SerDe
     return iKey;
   }
 
+  List<Boolean> getStorageFormatOfCol(int colPos){
+    return columnsMapping.get(colPos).binaryStorage;
+  }
+
   public SerDeStats getSerDeStats() {
     // no support for statistics
     return null;
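
The new accessor exposes the per-component binary-storage flags for one mapped
column. A brief usage sketch, assuming (as the .get(0) call in the
HBaseStorageHandler hunk below suggests) that a primitive column carries a single
flag, while a map column would carry one for its key and one for its value:

    // Hypothetical call pattern; 'serde' is an initialized HBaseSerDe.
    List<Boolean> flags = serde.getStorageFormatOfCol(serde.getKeyColumnOffset());
    boolean keyIsBinary = flags.get(0);  // first (and, for primitives, only) entry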

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java?rev=1301315&r1=1301314&r2=1301315&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java Fri Mar 16 02:35:07 2012
@@ -264,6 +264,8 @@ public class HBaseStorageHandler extends
     jobProperties.put(
       HBaseSerDe.HBASE_COLUMNS_MAPPING,
       tableProperties.getProperty(HBaseSerDe.HBASE_COLUMNS_MAPPING));
+    jobProperties.put(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,
+      tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE,"string"));
 
     String tableName =
       tableProperties.getProperty(HBaseSerDe.HBASE_TABLE_NAME);
@@ -289,11 +291,12 @@ public class HBaseStorageHandler extends
       Arrays.asList(columnNameProperty.split(","));
 
     HBaseSerDe hbaseSerde = (HBaseSerDe) deserializer;
-    String keyColName = columnNames.get(hbaseSerde.getKeyColumnOffset());
+    int keyColPos = hbaseSerde.getKeyColumnOffset();
     String keyColType = jobConf.get(org.apache.hadoop.hive.serde.Constants.LIST_COLUMN_TYPES).
-        split(",")[hbaseSerde.getKeyColumnOffset()];
+        split(",")[keyColPos];
     IndexPredicateAnalyzer analyzer =
-      HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(keyColName, keyColType);
+      HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(columnNames.get(keyColPos), keyColType,
+        hbaseSerde.getStorageFormatOfCol(keyColPos).get(0));
     List<IndexSearchCondition> searchConditions =
       new ArrayList<IndexSearchCondition>();
     ExprNodeDesc residualPredicate =
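
The decomposePredicate() hunk above now feeds the key's storage flag into the
analyzer. The flag itself comes from the '#' suffix of the column mapping spec, as
parsed by getStorageFormatOfKey() in the HiveHBaseTableInputFormat.java diff below.
A hedged summary of that parsing, as illustrative assertions (not part of the
commit):

    // spec is "family:qualifier" or "family:qualifier#type"; defaultFormat comes
    // from hbase.table.default.storage.type, falling back to "string".
    assert !getStorageFormatOfKey(":key", "string");    // no suffix -> table default
    assert  getStorageFormatOfKey(":key#-", "binary");  // "-"       -> table default
    assert !getStorageFormatOfKey(":key#s", "binary");  // prefix of "string" -> text
    assert  getStorageFormatOfKey(":key#b", "string");  // prefix of "binary" -> binary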

Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1301315&r1=1301314&r2=1301315&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Fri Mar 16 02:35:07 2012
@@ -47,10 +47,18 @@ import org.apache.hadoop.hive.serde.Cons
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -150,7 +158,9 @@ public class HiveHBaseTableInputFormat e
       throw new IOException(e);
     }
 
-    tableSplit = convertFilter(jobConf, scan, tableSplit, iKey);
+    tableSplit = convertFilter(jobConf, scan, tableSplit, iKey,
+      getStorageFormatOfKey(columnsMapping.get(iKey).mappingSpec,
+      jobConf.get(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string")));
     setScan(scan);
     Job job = new Job(jobConf);
     TaskAttemptContext tac = ShimLoader.getHadoopShims().newTaskAttemptContext(
@@ -235,7 +245,7 @@ public class HiveHBaseTableInputFormat e
     JobConf jobConf,
     Scan scan,
     TableSplit tableSplit,
-    int iKey)
+    int iKey, boolean isKeyBinary)
     throws IOException {
 
     String filterExprSerialized =
@@ -248,7 +258,7 @@ public class HiveHBaseTableInputFormat e
 
     String colName = jobConf.get(Constants.LIST_COLUMNS).split(",")[iKey];
     String colType = jobConf.get(Constants.LIST_COLUMN_TYPES).split(",")[iKey];
-    IndexPredicateAnalyzer analyzer = newIndexPredicateAnalyzer(colName,colType);
+    IndexPredicateAnalyzer analyzer = newIndexPredicateAnalyzer(colName,colType, isKeyBinary);
 
     List<IndexSearchCondition> searchConditions =
       new ArrayList<IndexSearchCondition>();
@@ -273,42 +283,38 @@ public class HiveHBaseTableInputFormat e
     IndexSearchCondition sc = searchConditions.get(0);
     ExprNodeConstantEvaluator eval =
       new ExprNodeConstantEvaluator(sc.getConstantDesc());
-    byte [] row;
-    try {
-      ObjectInspector objInspector = eval.initialize(null);
-      Object writable = eval.evaluate(null);
-      ByteStream.Output serializeStream = new ByteStream.Output();
-      LazyUtils.writePrimitiveUTF8(
-        serializeStream,
-        writable,
-        (PrimitiveObjectInspector) objInspector,
-        false,
-        (byte) 0,
-        null);
-      row = new byte[serializeStream.getCount()];
-      System.arraycopy(
-        serializeStream.getData(), 0,
-        row, 0, serializeStream.getCount());
-    } catch (HiveException ex) {
-      throw new IOException(ex);
+
+    PrimitiveObjectInspector objInspector;
+    Object writable;
+
+    try{
+      objInspector = (PrimitiveObjectInspector)eval.initialize(null);
+      writable = eval.evaluate(null);
+    } catch (ClassCastException cce) {
+      throw new IOException("Currently only primitve types are supported. Found: " +
+        sc.getConstantDesc().getTypeString());
+    } catch (HiveException e) {
+      throw new IOException(e);
     }
 
+    byte [] constantVal = getConstantVal(writable, objInspector, isKeyBinary);
     byte [] startRow = HConstants.EMPTY_START_ROW, stopRow = HConstants.EMPTY_END_ROW;
     String comparisonOp = sc.getComparisonOp();
+
     if("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual".equals(comparisonOp)){
-      startRow = row;
-      stopRow = getNextBA(row);
+      startRow = constantVal;
+      stopRow = getNextBA(constantVal);
     } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan".equals(comparisonOp)){
-      stopRow = row;
+      stopRow = constantVal;
     } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan"
         .equals(comparisonOp)) {
-      startRow = row;
+      startRow = constantVal;
     } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan"
         .equals(comparisonOp)){
-      startRow = getNextBA(row);
+      startRow = getNextBA(constantVal);
     } else if ("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan"
         .equals(comparisonOp)){
-      stopRow = getNextBA(row);
+      stopRow = getNextBA(constantVal);
     } else {
       throw new IOException(comparisonOp + " is not a supported comparison operator");
     }
@@ -325,6 +331,44 @@ public class HiveHBaseTableInputFormat e
     return tableSplit;
   }
 
+    private byte[] getConstantVal(Object writable, PrimitiveObjectInspector poi,
+        boolean isKeyBinary) throws IOException{
+
+        if (!isKeyBinary){
+          // Key is stored in text format. Get the byte representation of the constant in text format as well.
+          byte[] startRow;
+          ByteStream.Output serializeStream = new ByteStream.Output();
+          LazyUtils.writePrimitiveUTF8(serializeStream, writable, poi, false, (byte) 0, null);
+          startRow = new byte[serializeStream.getCount()];
+          System.arraycopy(serializeStream.getData(), 0, startRow, 0, serializeStream.getCount());
+          return startRow;
+        }
+
+        PrimitiveCategory pc = poi.getPrimitiveCategory();
+        switch (pc) {
+        case INT:
+            return Bytes.toBytes(((IntWritable)writable).get());
+        case BOOLEAN:
+            return Bytes.toBytes(((BooleanWritable)writable).get());
+        case LONG:
+            return Bytes.toBytes(((LongWritable)writable).get());
+        case FLOAT:
+            return Bytes.toBytes(((FloatWritable)writable).get());
+        case DOUBLE:
+            return Bytes.toBytes(((DoubleWritable)writable).get());
+        case SHORT:
+            return Bytes.toBytes(((ShortWritable)writable).get());
+        case STRING:
+            return Bytes.toBytes(((Text)writable).toString());
+        case BYTE:
+            return Bytes.toBytes(((ByteWritable)writable).get());
+
+        default:
+          throw new IOException("Type not supported " + pc);
+        }
+      }
+
+
   private byte[] getNextBA(byte[] current){
    // startRow is inclusive while stopRow is exclusive;
    // this utility method returns the very next byte array that occurs after the current one.
@@ -343,12 +387,16 @@ public class HiveHBaseTableInputFormat e
    * @return preconfigured predicate analyzer
    */
   static IndexPredicateAnalyzer newIndexPredicateAnalyzer(
-    String keyColumnName, String keyColType) {
+    String keyColumnName, String keyColType, boolean isKeyBinary) {
 
     IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
 
+    // We can always push an equality predicate. We just need to get the appropriate
+    // byte-array representation of the filter condition's constant.
     analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual");
-    if(keyColType.equalsIgnoreCase("string")){
+    // We can push other comparisons only if the storage format in HBase is binary,
+    // or if the key is a string type, since then lexicographic ordering suffices.
+    if(isKeyBinary || (keyColType.equalsIgnoreCase("string"))){
       analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic." +
         "GenericUDFOPEqualOrGreaterThan");
       analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan");
@@ -412,7 +460,9 @@ public class HiveHBaseTableInputFormat e
     // split per region, the implementation actually takes the scan
     // definition into account and excludes regions which don't satisfy
     // the start/stop row conditions (HBASE-1829).
-    convertFilter(jobConf, scan, null, iKey);
+    convertFilter(jobConf, scan, null, iKey,
+      getStorageFormatOfKey(columnsMapping.get(iKey).mappingSpec,
+      jobConf.get(HBaseSerDe.HBASE_TABLE_DEFAULT_STORAGE_TYPE, "string")));
 
     setScan(scan);
     Job job = new Job(jobConf);
@@ -429,4 +479,28 @@ public class HiveHBaseTableInputFormat e
 
     return results;
   }
+
+  private boolean getStorageFormatOfKey(String spec, String defaultFormat) throws IOException{
+
+    String[] mapInfo = spec.split("#");
+    boolean tblLevelDefault = "binary".equalsIgnoreCase(defaultFormat);
+
+    switch (mapInfo.length) {
+    case 1:
+      return tblLevelDefault;
+
+    case 2:
+      String storageType = mapInfo[1];
+      if(storageType.equals("-")) {
+        return tblLevelDefault;
+      } else if ("string".startsWith(storageType)){
+        return false;
+      } else if ("binary".startsWith(storageType)){
+        return true;
+      }
+
+    default:
+      throw new IOException("Malformed string: " + spec);
+    }
+  }
 }
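
One helper the diff references but does not show in full: getNextBA(), whose body
falls outside the hunk context above. A plausible minimal implementation consistent
with its comment (an assumption, not the committed code) appends a zero byte, which
yields the immediate successor in HBase's unsigned lexicographic order:

    private byte[] getNextBA(byte[] current) {
      // Appending 0x00 produces the smallest byte array that sorts strictly
      // after 'current', so [row, getNextBA(row)) covers exactly one key.
      byte[] next = new byte[current.length + 1];
      System.arraycopy(current, 0, next, 0, current.length);
      return next;  // trailing byte is already 0x00
    }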

Added: hive/trunk/hbase-handler/src/test/queries/external_table_ppd.q
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/queries/external_table_ppd.q?rev=1301315&view=auto
==============================================================================
--- hive/trunk/hbase-handler/src/test/queries/external_table_ppd.q (added)
+++ hive/trunk/hbase-handler/src/test/queries/external_table_ppd.q Fri Mar 16 02:35:07 2012
@@ -0,0 +1,37 @@
+DROP TABLE t_hbase;
+
+CREATE TABLE t_hbase(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:binarykey#-,cf:binarybyte#-,cf:binaryshort#-,:key#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive",
+               "hbase.table.default.storage.type" = "binary");
+
+DESCRIBE FORMATTED t_hbase;
+
+INSERT OVERWRITE TABLE t_hbase
+SELECT 'user1', 1, 11, 10, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126;
+
+INSERT OVERWRITE TABLE t_hbase
+SELECT 'user2', 127, 327, 2147, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126;
+
+INSERT OVERWRITE TABLE t_hbase
+SELECT 'user3', -128, -327, -214748, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126;
+
+explain SELECT * FROM t_hbase where int_col > 0;
+SELECT * FROM t_hbase where int_col > 0;
+
+DROP TABLE t_hbase;
+

Added: hive/trunk/hbase-handler/src/test/results/external_table_ppd.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/results/external_table_ppd.q.out?rev=1301315&view=auto
==============================================================================
--- hive/trunk/hbase-handler/src/test/results/external_table_ppd.q.out (added)
+++ hive/trunk/hbase-handler/src/test/results/external_table_ppd.q.out Fri Mar 16 02:35:07 2012
@@ -0,0 +1,186 @@
+PREHOOK: query: DROP TABLE t_hbase
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE t_hbase
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE t_hbase(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:binarykey#-,cf:binarybyte#-,cf:binaryshort#-,:key#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive",
+               "hbase.table.default.storage.type" = "binary")
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE t_hbase(key STRING,
+                     tinyint_col TINYINT,
+                     smallint_col SMALLINT,
+                     int_col INT,
+                     bigint_col BIGINT,
+                     float_col FLOAT,
+                     double_col DOUBLE,
+                     boolean_col BOOLEAN)
+STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
+WITH SERDEPROPERTIES ("hbase.columns.mapping" = "cf:binarykey#-,cf:binarybyte#-,cf:binaryshort#-,:key#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-")
+TBLPROPERTIES ("hbase.table.name" = "t_hive",
+               "hbase.table.default.storage.type" = "binary")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: DESCRIBE FORMATTED t_hbase
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: DESCRIBE FORMATTED t_hbase
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
+key                 	string              	from deserializer   
+tinyint_col         	tinyint             	from deserializer   
+smallint_col        	smallint            	from deserializer   
+int_col             	int                 	from deserializer   
+bigint_col          	bigint              	from deserializer   
+float_col           	float               	from deserializer   
+double_col          	double              	from deserializer   
+boolean_col         	boolean             	from deserializer   
+	 	 
+# Detailed Table Information	 	 
+Database:           	default             	 
+#### A masked pattern was here ####
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+#### A masked pattern was here ####
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	hbase.table.default.storage.type	binary              
+	hbase.table.name    	t_hive              
+	storage_handler     	org.apache.hadoop.hive.hbase.HBaseStorageHandler
+#### A masked pattern was here ####
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.hbase.HBaseSerDe	 
+InputFormat:        	org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	hbase.columns.mapping	cf:binarykey#-,cf:binarybyte#-,cf:binaryshort#-,:key#-,cf:binarylong#-,cf:binaryfloat#-,cf:binarydouble#-,cf:binaryboolean#-
+	serialization.format	1                   
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user1', 1, 11, 10, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user1', 1, 11, 10, 1, 1.0, 1.0, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user2', 127, 327, 2147, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user2', 127, 327, 2147, 9223372036854775807, 211.31, 268746532.0571, false
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user3', -128, -327, -214748, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: INSERT OVERWRITE TABLE t_hbase
+SELECT 'user3', -128, -327, -214748, -9223372036854775808, -201.17, -2110789.37145, true
+FROM src
+WHERE key=100 OR key=125 OR key=126
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@t_hbase
+PREHOOK: query: explain SELECT * FROM t_hbase where int_col > 0
+PREHOOK: type: QUERY
+POSTHOOK: query: explain SELECT * FROM t_hbase where int_col > 0
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME t_hbase))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (> (TOK_TABLE_OR_COL int_col) 0))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        t_hbase 
+          TableScan
+            alias: t_hbase
+            filterExpr:
+                expr: (int_col > 0)
+                type: boolean
+            Filter Operator
+              predicate:
+                  expr: (int_col > 0)
+                  type: boolean
+              Select Operator
+                expressions:
+                      expr: key
+                      type: string
+                      expr: tinyint_col
+                      type: tinyint
+                      expr: smallint_col
+                      type: smallint
+                      expr: int_col
+                      type: int
+                      expr: bigint_col
+                      type: bigint
+                      expr: float_col
+                      type: float
+                      expr: double_col
+                      type: double
+                      expr: boolean_col
+                      type: boolean
+                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: SELECT * FROM t_hbase where int_col > 0
+PREHOOK: type: QUERY
+PREHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM t_hbase where int_col > 0
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@t_hbase
+#### A masked pattern was here ####
+user1	1	11	10	1	1.0	1.0	true
+user2	127	327	2147	9223372036854775807	211.31	2.687465320571E8	false
+PREHOOK: query: DROP TABLE t_hbase
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@t_hbase
+PREHOOK: Output: default@t_hbase
+POSTHOOK: query: DROP TABLE t_hbase
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@t_hbase
+POSTHOOK: Output: default@t_hbase