You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by zs...@apache.org on 2009/12/09 19:51:01 UTC

svn commit: r888920 - in /hadoop/hive/trunk: CHANGES.txt ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/test/queries/clientpositive/transform1.q ql/src/test/results/clientpositive/transform1.q.out

Author: zshao
Date: Wed Dec  9 18:51:00 2009
New Revision: 888920

URL: http://svn.apache.org/viewvc?rev=888920&view=rev
Log:
HIVE-973. Support nested types in custom scripts. (Namit Jain via zshao)

Added:
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform1.q
    hadoop/hive/trunk/ql/src/test/results/clientpositive/transform1.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=888920&r1=888919&r2=888920&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Dec  9 18:51:00 2009
@@ -308,6 +308,8 @@
     HIVE-638 fix sampling in subquery for real
     (namit via He Yongqiang)
 
+    HIVE-973. Support nested types in custom scripts. (Namit Jain via zshao)
+
 Release 0.4.0 -  Unreleased
 
   INCOMPATIBLE CHANGES

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=888920&r1=888919&r2=888920&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Dec  9 18:51:00 2009
@@ -1142,8 +1142,7 @@
           assert child.getType() == HiveParser.TOK_TABCOL;
           String colAlias = unescapeIdentifier(((ASTNode)child.getChild(0)).getText());
           failIfColAliasExists(colAliasNamesDuplicateCheck, colAlias);
-          outputCols.add(new ColumnInfo(colAlias,
-                                        TypeInfoUtils.getTypeInfoFromTypeString(DDLSemanticAnalyzer.getTypeName(((ASTNode)child.getChild(1)).getType())), null, false));
+          outputCols.add(new ColumnInfo(colAlias, TypeInfoUtils.getTypeInfoFromTypeString(getTypeStringFromAST((ASTNode)child.getChild(1))), null, false));
         }
       }
     }

Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform1.q?rev=888920&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/transform1.q Wed Dec  9 18:51:00 2009
@@ -0,0 +1,23 @@
+drop table transform1_t1;
+create table transform1_t1(a string, b string);
+
+EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1;
+
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1;
+
+drop table transform1_t1;
+
+drop table transform1_t2;
+create table transform1_t2(col array<int>);
+
+insert overwrite table transform1_t2
+select array(1,2,3) from src limit 1;
+
+EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2;
+
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2;
+
+drop table transform1_t2;
+

Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/transform1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/transform1.q.out?rev=888920&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/transform1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/transform1.q.out Wed Dec  9 18:51:00 2009
@@ -0,0 +1,141 @@
+PREHOOK: query: drop table transform1_t1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table transform1_t1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table transform1_t1(a string, b string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table transform1_t1(a string, b string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@transform1_t1
+PREHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF transform1_t1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST TOK_ALLCOLREF) TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL col (TOK_LIST TOK_BIGINT))))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        transform1_t1 
+          TableScan
+            alias: transform1_t1
+            Select Operator
+              expressions:
+                    expr: a
+                    type: string
+                    expr: b
+                    type: string
+              outputColumnNames: _col0, _col1
+              Transform Operator
+                command: cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t1
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1737132015/10000
+POSTHOOK: query: SELECT transform(*) USING 'cat' AS (col array<bigint>) FROM transform1_t1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t1
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/1737132015/10000
+PREHOOK: query: drop table transform1_t1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table transform1_t1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@transform1_t1
+PREHOOK: query: drop table transform1_t2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table transform1_t2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table transform1_t2(col array<int>)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table transform1_t2(col array<int>)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@transform1_t2
+PREHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@transform1_t2
+POSTHOOK: query: insert overwrite table transform1_t2
+select array(1,2,3) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@transform1_t2
+PREHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF transform1_t2)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST '0\0021\0022') TOK_SERDE TOK_RECORDWRITER 'cat' TOK_SERDE TOK_RECORDREADER (TOK_TABCOLLIST (TOK_TABCOL col (TOK_LIST TOK_INT))))))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Alias -> Map Operator Tree:
+        transform1_t2 
+          TableScan
+            alias: transform1_t2
+            Select Operator
+              expressions:
+                    expr: '012'
+                    type: string
+              outputColumnNames: _col0
+              Transform Operator
+                command: cat
+                output info:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+
+
+PREHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@transform1_t2
+PREHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/389439457/10000
+POSTHOOK: query: SELECT transform('0\0021\0022') USING 'cat' AS (col array<int>) FROM transform1_t2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@transform1_t2
+POSTHOOK: Output: file:/data/users/njain/hive1/hive1/build/ql/tmp/389439457/10000
+[0,1,2]
+PREHOOK: query: drop table transform1_t2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table transform1_t2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: default@transform1_t2