Posted to commits@hive.apache.org by ha...@apache.org on 2013/09/19 20:58:07 UTC

svn commit: r1524806 - in /hive/branches/vectorization: conf/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/io/orc/ ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/queries/clientpositive/ ql/src/test/resu...

Author: hashutosh
Date: Thu Sep 19 18:58:07 2013
New Revision: 1524806

URL: http://svn.apache.org/r1524806
Log:
HIVE-5309 : Update hive-default.xml.template for vectorization flag; remove unused imports from MetaStoreUtils.java (Jitendra Nath Pandey via Ashutosh Chauhan)

Added:
    hive/branches/vectorization/ql/src/test/queries/clientpositive/vectorized_rcfile_columnar.q
    hive/branches/vectorization/ql/src/test/results/clientpositive/vectorized_rcfile_columnar.q.out
Modified:
    hive/branches/vectorization/conf/hive-default.xml.template
    hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
    hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
    hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

Modified: hive/branches/vectorization/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/conf/hive-default.xml.template?rev=1524806&r1=1524805&r2=1524806&view=diff
==============================================================================
--- hive/branches/vectorization/conf/hive-default.xml.template (original)
+++ hive/branches/vectorization/conf/hive-default.xml.template Thu Sep 19 18:58:07 2013
@@ -1985,4 +1985,13 @@
   </description>
 </property>
 
+<property>
+  <name>hive.vectorized.execution.enabled</name>
+  <value>false</value>
+  <description>
+  This flag should be set to true to enable vectorized mode of query execution.
+  The default value is false.
+  </description>
+</property>
+
 </configuration>
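
Note: the flag added above can also be toggled per session; a minimal usage sketch (illustrative only, not part of this commit, and the table name is hypothetical):

  SET hive.vectorized.execution.enabled=true;
  -- On this branch only the ORC reader supplies vectorized row batches; other
  -- formats such as RCFile fall back to ordinary row-mode execution.
  SELECT key, count(*) FROM orc_table_example GROUP BY key;
  SET hive.vectorized.execution.enabled=false;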

Modified: hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java?rev=1524806&r1=1524805&r2=1524806&view=diff
==============================================================================
--- hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (original)
+++ hive/branches/vectorization/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java Thu Sep 19 18:58:07 2013
@@ -52,7 +52,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;

Modified: hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1524806&r1=1524805&r2=1524806&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/branches/vectorization/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Thu Sep 19 18:58:07 2013
@@ -102,7 +102,6 @@ class RecordReaderImpl implements Record
     this.included = included;
     this.sarg = sarg;
     if (sarg != null) {
-      System.out.println("DEBUG XXXX SARG is not null");
       sargLeaves = sarg.getLeaves();
       filterColumns = new int[sargLeaves.size()];
       for(int i=0; i < filterColumns.length; ++i) {
@@ -110,7 +109,6 @@ class RecordReaderImpl implements Record
         filterColumns[i] = findColumns(columnNames, colName);
       }
     } else {
-      System.out.println("DEBUG XXXX SARG is null");
       sargLeaves = null;
       filterColumns = null;
     }

Modified: hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1524806&r1=1524805&r2=1524806&view=diff
==============================================================================
--- hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/vectorization/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Thu Sep 19 18:58:07 2013
@@ -107,8 +107,7 @@ public class QTestUtil {
   public static final HashSet<String> srcTables = new HashSet<String>
     (Arrays.asList(new String [] {
         "src", "src1", "srcbucket", "srcbucket2", "src_json", "src_thrift",
-        "src_sequencefile", "srcpart",
-        AllVectorTypesRecord.TABLE_NAME
+        "src_sequencefile", "srcpart", "alltypesorc"
       }));
 
   private ParseDriver pd;

Added: hive/branches/vectorization/ql/src/test/queries/clientpositive/vectorized_rcfile_columnar.q
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/queries/clientpositive/vectorized_rcfile_columnar.q?rev=1524806&view=auto
==============================================================================
--- hive/branches/vectorization/ql/src/test/queries/clientpositive/vectorized_rcfile_columnar.q (added)
+++ hive/branches/vectorization/ql/src/test/queries/clientpositive/vectorized_rcfile_columnar.q Thu Sep 19 18:58:07 2013
@@ -0,0 +1,18 @@
+--This query must pass even when vectorized reader is not available for
+--RC files. The query must fall back to the non-vector mode and run successfully.
+
+CREATE table columnTable (key STRING, value STRING)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
+
+FROM src
+INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10;
+describe columnTable;
+
+SET hive.vectorized.execution.enabled=true;
+
+SELECT key, value FROM columnTable;
+
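
The test above enables vectorization against an RCFile table, for which no vectorized reader exists, so execution is expected to fall back to row mode and still return correct results (the golden output below shows the rows). As a rough way to see which mode a query takes, one could inspect its plan; whether and how the plan marks vectorization depends on the Hive version, so this is only a sketch:

  SET hive.vectorized.execution.enabled=true;
  EXPLAIN SELECT key, value FROM columnTable;
  -- For an RCFile-backed table the plan should describe an ordinary row-mode
  -- table scan; a vectorizable ORC scan would be annotated as vectorized in
  -- versions whose EXPLAIN output reports it.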

Added: hive/branches/vectorization/ql/src/test/results/clientpositive/vectorized_rcfile_columnar.q.out
URL: http://svn.apache.org/viewvc/hive/branches/vectorization/ql/src/test/results/clientpositive/vectorized_rcfile_columnar.q.out?rev=1524806&view=auto
==============================================================================
--- hive/branches/vectorization/ql/src/test/results/clientpositive/vectorized_rcfile_columnar.q.out (added)
+++ hive/branches/vectorization/ql/src/test/results/clientpositive/vectorized_rcfile_columnar.q.out Thu Sep 19 18:58:07 2013
@@ -0,0 +1,61 @@
+PREHOOK: query: --This query must pass even when vectorized reader is not available for
+--RC files. The query must fall back to the non-vector mode and run successfully.
+
+CREATE table columnTable (key STRING, value STRING)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: --This query must pass even when vectorized reader is not available for
+--RC files. The query must fall back to the non-vector mode and run successfully.
+
+CREATE table columnTable (key STRING, value STRING)
+ROW FORMAT SERDE
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS
+  INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+  OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@columnTable
+PREHOOK: query: FROM src
+INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@columntable
+POSTHOOK: query: FROM src
+INSERT OVERWRITE TABLE columnTable SELECT src.key, src.value LIMIT 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@columntable
+POSTHOOK: Lineage: columntable.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columntable.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: describe columnTable
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe columnTable
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: columntable.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columntable.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+key                 	string              	None                
+value               	string              	None                
+PREHOOK: query: SELECT key, value FROM columnTable
+PREHOOK: type: QUERY
+PREHOOK: Input: default@columntable
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT key, value FROM columnTable
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@columntable
+#### A masked pattern was here ####
+POSTHOOK: Lineage: columntable.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: columntable.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+238	val_238
+86	val_86
+311	val_311
+27	val_27
+165	val_165
+409	val_409
+255	val_255
+278	val_278
+98	val_98
+484	val_484