You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by rm...@apache.org on 2009/08/12 04:01:39 UTC
svn commit: r803350 - in /hadoop/hive/trunk: ./
contrib/src/test/results/clientnegative/
contrib/src/test/results/clientpositive/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/java/org/apache/hadoop/hive/ql/plan/
ql/src/test/queries/clientpositiv...
Author: rmurthy
Date: Wed Aug 12 02:01:38 2009
New Revision: 803350
URL: http://svn.apache.org/viewvc?rev=803350&view=rev
Log:
HIVE-743. Let user specify serde for custom scripts.
(Namit Jain via rmurthy)
Added:
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input33.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input34.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input35.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input36.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out
hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Aug 12 02:01:38 2009
@@ -5,6 +5,9 @@
INCOMPATIBLE CHANGES
NEW FEATURES
+
+ HIVE-743. Let user specify serde for custom scripts.
+ (Namit Jain via rmurthy)
IMPROVEMENTS
Modified: hadoop/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out Wed Aug 12 02:01:38 2009
@@ -17,7 +17,7 @@
)
STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
- (TOK_CREATETABLE serde_regex TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL host TOK_STRING) (TOK_TABCOL identity TOK_STRING) (TOK_TABCOL user TOK_STRING) (TOK_TABCOL time TOK_STRING) (TOK_TABCOL request TOK_STRING) (TOK_TABCOL status TOK_INT) (TOK_TABCOL size TOK_INT) (TOK_TABCOL referer TOK_STRING) (TOK_TABCOL agent TOK_STRING)) (TOK_TABLESERIALIZER 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe' (TOK_TABLEPROPERTIES (TOK_TABLEPROPLIST (TOK_TABLEPROPERTY "input.regex" "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?") (TOK_TABLEPROPERTY "output.format.string" "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s")))) TOK_TBLTEXTFILE)
+ (TOK_CREATETABLE serde_regex TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL host TOK_STRING) (TOK_TABCOL identity TOK_STRING) (TOK_TABCOL user TOK_STRING) (TOK_TABCOL time TOK_STRING) (TOK_TABCOL request TOK_STRING) (TOK_TABCOL status TOK_INT) (TOK_TABCOL size TOK_INT) (TOK_TABCOL referer TOK_STRING) (TOK_TABCOL agent TOK_STRING)) (TOK_TABLESERIALIZER (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe' (TOK_TABLEPROPERTIES (TOK_TABLEPROPLIST (TOK_TABLEPROPERTY "input.regex" "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?") (TOK_TABLEPROPERTY "output.format.string" "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s"))))) TOK_TBLTEXTFILE)
STAGE DEPENDENCIES:
Stage-0 is a root stage
Modified: hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out (original)
+++ hadoop/hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out Wed Aug 12 02:01:38 2009
@@ -17,7 +17,7 @@
)
STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
- (TOK_CREATETABLE serde_regex TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL host TOK_STRING) (TOK_TABCOL identity TOK_STRING) (TOK_TABCOL user TOK_STRING) (TOK_TABCOL time TOK_STRING) (TOK_TABCOL request TOK_STRING) (TOK_TABCOL status TOK_STRING) (TOK_TABCOL size TOK_STRING) (TOK_TABCOL referer TOK_STRING) (TOK_TABCOL agent TOK_STRING)) (TOK_TABLESERIALIZER 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe' (TOK_TABLEPROPERTIES (TOK_TABLEPROPLIST (TOK_TABLEPROPERTY "input.regex" "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?") (TOK_TABLEPROPERTY "output.format.string" "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s")))) TOK_TBLTEXTFILE)
+ (TOK_CREATETABLE serde_regex TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL host TOK_STRING) (TOK_TABCOL identity TOK_STRING) (TOK_TABCOL user TOK_STRING) (TOK_TABCOL time TOK_STRING) (TOK_TABCOL request TOK_STRING) (TOK_TABCOL status TOK_STRING) (TOK_TABCOL size TOK_STRING) (TOK_TABCOL referer TOK_STRING) (TOK_TABCOL agent TOK_STRING)) (TOK_TABLESERIALIZER (TOK_SERDENAME 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe' (TOK_TABLEPROPERTIES (TOK_TABLEPROPLIST (TOK_TABLEPROPERTY "input.regex" "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?") (TOK_TABLEPROPERTY "output.format.string" "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s"))))) TOK_TBLTEXTFILE)
STAGE DEPENDENCIES:
Stage-0 is a root stage
@@ -59,7 +59,7 @@
query: LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex
query: SELECT * FROM serde_regex ORDER BY time
Input: default/serde_regex
-Output: file:/data/users/zshao/tools/167-trunk-apache-hive/build/ql/tmp/2091262141/10000
+Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1069367320/10000
127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 NULL NULL
127.0.0.1 - - [26/May/2009:00:00:00 +0000] "GET /someurl/?track=Blabla(Main) HTTP/1.1" 200 5864 - "Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.65 Safari/525.19"
query: DROP TABLE serde_regex
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Aug 12 02:01:38 2009
@@ -216,6 +216,8 @@
}
break;
case HiveParser.TOK_TABLEROWFORMAT:
+
+ child = (ASTNode)child.getChild(0);
int numChildRowFormat = child.getChildCount();
for (int numC = 0; numC < numChildRowFormat; numC++)
{
@@ -241,6 +243,8 @@
}
break;
case HiveParser.TOK_TABLESERIALIZER:
+
+ child = (ASTNode)child.getChild(0);
serde = unescapeSQLString(child.getChild(0).getText());
if (child.getChildCount() == 2) {
mapProp = new HashMap<String, String>();
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Aug 12 02:01:38 2009
@@ -50,6 +50,9 @@
TOK_TRUE;
TOK_FALSE;
TOK_TRANSFORM;
+TOK_SERDE;
+TOK_SERDENAME;
+TOK_SERDEPROPS;
TOK_EXPLIST;
TOK_ALIASLIST;
TOK_GROUPBY;
@@ -132,6 +135,8 @@
TOK_HINT;
TOK_MAPJOIN;
TOK_HINTARGLIST;
+TOK_USERSCRIPTCOLNAMES;
+TOK_USERSCRIPTCOLSCHEMA;
}
@@ -277,8 +282,8 @@
alterStatementSuffixSerdeProperties
@init { msgs.push("alter serdes statement"); }
@after { msgs.pop(); }
- : name=Identifier KW_SET KW_SERDE serde=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
- -> ^(TOK_ALTERTABLE_SERIALIZER $name $serde tableProperties?)
+ : name=Identifier KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
+ -> ^(TOK_ALTERTABLE_SERIALIZER $name $serdeName tableProperties?)
| name=Identifier KW_SET KW_SERDEPROPERTIES tableProperties
-> ^(TOK_ALTERTABLE_SERDEPROPERTIES $name tableProperties)
;
@@ -361,14 +366,36 @@
-> ^(TOK_TABLEBUCKETS $bucketCols $sortCols? $num)
;
+serde
+@init { msgs.push("serde specification"); }
+@after { msgs.pop(); }
+ : serdeFormat -> ^(TOK_SERDE serdeFormat)
+ | serdePropertiesFormat -> ^(TOK_SERDE serdePropertiesFormat)
+ ;
+
+serdeFormat
+@init { msgs.push("serde format specification"); }
+@after { msgs.pop(); }
+ : KW_ROW KW_FORMAT KW_SERDE name=StringLiteral (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
+ -> ^(TOK_SERDENAME $name $serdeprops?)
+ ;
+
+serdePropertiesFormat
+@init { msgs.push("serde properties specification"); }
+@after { msgs.pop(); }
+ :
+ KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?
+ -> ^(TOK_SERDEPROPS tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?)
+ ;
+
tableRowFormat
@init { msgs.push("table row format specification"); }
@after { msgs.pop(); }
:
- KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?
- -> ^(TOK_TABLEROWFORMAT tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?)
- | KW_ROW KW_FORMAT KW_SERDE name=StringLiteral (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
- -> ^(TOK_TABLESERIALIZER $name $serdeprops?)
+ serdePropertiesFormat
+ -> ^(TOK_TABLEROWFORMAT serdePropertiesFormat)
+ | serdeFormat
+ -> ^(TOK_TABLESERIALIZER serdeFormat)
;
tableProperties
@@ -695,7 +722,7 @@
:
( selectExpression (KW_AS? Identifier)?) -> ^(TOK_SELEXPR selectExpression Identifier?)
;
-
+
trfmClause
@init { msgs.push("transform clause"); }
@after { msgs.pop(); }
@@ -703,9 +730,9 @@
( KW_SELECT KW_TRANSFORM LPAREN selectExpressionList RPAREN
| KW_MAP selectExpressionList
| KW_REDUCE selectExpressionList )
- KW_USING StringLiteral
- (KW_AS (LPAREN aliasList RPAREN | aliasList) )?
- -> ^(TOK_TRANSFORM selectExpressionList StringLiteral aliasList?)
+ inSerde=serde? KW_USING StringLiteral
+ ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)) outSerde=serde?)?
+ -> ^(TOK_TRANSFORM selectExpressionList $inSerde? StringLiteral aliasList? columnNameTypeList? $outSerde?)
;
selectExpression
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Aug 12 02:01:38 2009
@@ -34,6 +34,7 @@
import java.util.Vector;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
+import java.lang.ClassNotFoundException;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
@@ -124,6 +125,7 @@
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -1103,46 +1105,167 @@
return cmd;
}
+ private tableDesc getTableDescFromSerDe(ASTNode child, String cols, boolean defaultCols) throws SemanticException {
+ if (child.getType() == HiveParser.TOK_SERDENAME) {
+ String serdeName = unescapeSQLString(child.getChild(0).getText());
+ Class<? extends Deserializer> serdeClass = null;
+
+ try {
+ serdeClass = (Class<? extends Deserializer>)Class.forName(serdeName);
+ } catch (ClassNotFoundException e) {
+ throw new SemanticException(e);
+ }
+
+ tableDesc tblDesc = PlanUtils.getTableDesc(serdeClass, Integer.toString(Utilities.tabCode), cols, defaultCols);
+ // copy all the properties
+ if (child.getChildCount() == 2) {
+ ASTNode prop = (ASTNode)((ASTNode)child.getChild(1)).getChild(0);
+ for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
+ String key = unescapeSQLString(prop.getChild(propChild).getChild(0).getText());
+ String value = unescapeSQLString(prop.getChild(propChild).getChild(1).getText());
+ tblDesc.getProperties().setProperty(key,value);
+ }
+ }
+ return tblDesc;
+ }
+ else if (child.getType() == HiveParser.TOK_SERDEPROPS) {
+ tableDesc tblDesc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode), cols, defaultCols);
+ int numChildRowFormat = child.getChildCount();
+ for (int numC = 0; numC < numChildRowFormat; numC++)
+ {
+ ASTNode rowChild = (ASTNode)child.getChild(numC);
+ switch (rowChild.getToken().getType()) {
+ case HiveParser.TOK_TABLEROWFORMATFIELD:
+ String fieldDelim = unescapeSQLString(rowChild.getChild(0).getText());
+ tblDesc.getProperties().setProperty(Constants.FIELD_DELIM, fieldDelim);
+ tblDesc.getProperties().setProperty(Constants.SERIALIZATION_FORMAT, fieldDelim);
+
+ if (rowChild.getChildCount()>=2) {
+ String fieldEscape = unescapeSQLString(rowChild.getChild(1).getText());
+ tblDesc.getProperties().setProperty(Constants.ESCAPE_CHAR, fieldDelim);
+ }
+ break;
+ case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
+ tblDesc.getProperties().setProperty(Constants.COLLECTION_DELIM, unescapeSQLString(rowChild.getChild(0).getText()));
+ break;
+ case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
+ tblDesc.getProperties().setProperty(Constants.MAPKEY_DELIM, unescapeSQLString(rowChild.getChild(0).getText()));
+ break;
+ case HiveParser.TOK_TABLEROWFORMATLINES:
+ tblDesc.getProperties().setProperty(Constants.LINE_DELIM, unescapeSQLString(rowChild.getChild(0).getText()));
+ break;
+ default: assert false;
+ }
+ }
+
+ return tblDesc;
+ }
+
+ // should never come here
+ return null;
+ }
@SuppressWarnings("nls")
private Operator genScriptPlan(ASTNode trfm, QB qb,
Operator input) throws SemanticException {
// If there is no "AS" clause, the output schema will be "key,value"
- ArrayList<String> outputColList = new ArrayList<String>();
- boolean defaultOutputColList = (trfm.getChildCount() < 3);
+ ArrayList<ColumnInfo> outputCols = new ArrayList<ColumnInfo>();
+ boolean defaultOutputColList = true;
+ int inputSerDeChildNum = -1, outputSerDeChildNum = -1;
+ int outputColumnNamesPos = -1, outputColumnSchemaPos = -1;
+ int execPos = 1;
+
+ // Go over all the children
+ for (int pos = 0; pos < trfm.getChildCount(); pos++) {
+ ASTNode child = (ASTNode)trfm.getChild(pos);
+ if (child.getType() == HiveParser.TOK_ALIASLIST) {
+ defaultOutputColList = false;
+ outputColumnNamesPos = pos;
+ break;
+ }
+ else if (child.getType() == HiveParser.TOK_TABCOLLIST) {
+ defaultOutputColList = false;
+ outputColumnSchemaPos = pos;
+ break;
+ }
+ }
+
+ // input serde specified
+ if ((trfm.getChildCount() > 1) &&
+ (trfm.getChild(1).getType() == HiveParser.TOK_SERDE)) {
+ inputSerDeChildNum = 1;
+ execPos++;
+ }
+
+ // output serde specified
+ int checkChildNum = -1;
+ if (outputColumnNamesPos >= 0)
+ checkChildNum = outputColumnNamesPos + 1;
+ else if (outputColumnSchemaPos >= 0)
+ checkChildNum = outputColumnSchemaPos + 1;
+
+ if (checkChildNum >= 0) {
+ if ((trfm.getChildCount() > (checkChildNum))
+ && (trfm.getChild(checkChildNum).getType() == HiveParser.TOK_SERDE))
+ outputSerDeChildNum = checkChildNum;
+ }
+
+ // If column type is not specified, use a string
if (defaultOutputColList) {
- outputColList.add("key");
- outputColList.add("value");
- } else {
- ASTNode collist = (ASTNode) trfm.getChild(2);
+ outputCols.add(new ColumnInfo("key", TypeInfoFactory.stringTypeInfo, null, false));
+ outputCols.add(new ColumnInfo("value", TypeInfoFactory.stringTypeInfo, null, false));
+ }
+ else if (outputColumnNamesPos >= 0) {
+ ASTNode collist = (ASTNode) trfm.getChild(outputColumnNamesPos);
int ccount = collist.getChildCount();
for (int i=0; i < ccount; ++i) {
- outputColList.add(unescapeIdentifier(((ASTNode)collist.getChild(i)).getText()));
+ outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)collist.getChild(i)).getText()), TypeInfoFactory.stringTypeInfo, null, false));
}
}
-
+ else {
+ assert outputColumnSchemaPos >= 0;
+ ASTNode collist = (ASTNode) trfm.getChild(outputColumnSchemaPos);
+ int ccount = collist.getChildCount();
+ for (int i=0; i < ccount; ++i) {
+ ASTNode child = (ASTNode) collist.getChild(i);
+ assert child.getType() == HiveParser.TOK_TABCOL;
+ outputCols.add(new ColumnInfo(unescapeIdentifier(((ASTNode)child.getChild(0)).getText()),
+ TypeInfoUtils.getTypeInfoFromTypeString(DDLSemanticAnalyzer.getTypeName(((ASTNode)child.getChild(1)).getType())), null, false));
+ }
+ }
+
RowResolver out_rwsch = new RowResolver();
StringBuilder columns = new StringBuilder();
- for (int i = 0; i < outputColList.size(); ++i) {
+ for (int i = 0; i < outputCols.size(); ++i) {
if (i != 0) {
columns.append(",");
}
- columns.append(outputColList.get(i));
+ columns.append(outputCols.get(i).getInternalName());
out_rwsch.put(
qb.getParseInfo().getAlias(),
- outputColList.get(i),
- new ColumnInfo(outputColList.get(i), TypeInfoFactory.stringTypeInfo, null, false) // Script output is always a string
- );
+ outputCols.get(i).getInternalName(),
+ outputCols.get(i));
}
+ tableDesc outInfo;
+ tableDesc inInfo;
+
+ if (inputSerDeChildNum < 0)
+ inInfo = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), "");
+ else
+ inInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(inputSerDeChildNum))).getChild(0), "", false);
+
+ if (outputSerDeChildNum < 0)
+ outInfo = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), columns.toString(), defaultOutputColList);
+ else
+ outInfo = getTableDescFromSerDe((ASTNode)(((ASTNode)trfm.getChild(outputSerDeChildNum))).getChild(0), columns.toString(), defaultOutputColList);
+
Operator output = putOpInsertMap(OperatorFactory
.getAndMakeChild(
new scriptDesc(
- getFixedCmd(stripQuotes(trfm.getChild(1).getText())),
- PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), columns.toString(), defaultOutputColList),
- PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.tabCode), "")),
- new RowSchema(
- out_rwsch.getColumnInfos()), input), out_rwsch);
+ getFixedCmd(stripQuotes(trfm.getChild(execPos).getText())),
+ outInfo, inInfo),
+ new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
return output;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Aug 12 02:01:38 2009
@@ -40,6 +40,7 @@
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.thrift.protocol.TBinaryProtocol;
@@ -68,6 +69,14 @@
}
/**
+ * Generate the table descriptor of given serde with the separatorCode
+ * and column names (comma separated string).
+ */
+ public static tableDesc getTableDesc(Class<? extends Deserializer> serdeClass, String separatorCode, String columns) {
+ return getTableDesc(serdeClass, separatorCode, columns, false);
+ }
+
+ /**
* Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode
* and column names (comma separated string), and whether the last column should take
* the rest of the line.
@@ -78,12 +87,30 @@
}
/**
+ * Generate the table descriptor of the serde specified with the separatorCode
+ * and column names (comma separated string), and whether the last column should take
+ * the rest of the line.
+ */
+ public static tableDesc getTableDesc(Class<? extends Deserializer> serdeClass,
+ String separatorCode, String columns,
+ boolean lastColumnTakesRestOfTheLine) {
+ return getTableDesc(serdeClass, separatorCode, columns, null, lastColumnTakesRestOfTheLine);
+ }
+
+ /**
* Generate the table descriptor of MetadataTypedColumnsetSerDe with the separatorCode
* and column names (comma separated string), and whether the last column should take
* the rest of the line.
*/
public static tableDesc getDefaultTableDesc(String separatorCode, String columns, String columnTypes,
boolean lastColumnTakesRestOfTheLine) {
+ return getTableDesc(LazySimpleSerDe.class, separatorCode, columns, columnTypes,
+ lastColumnTakesRestOfTheLine);
+ }
+
+ public static tableDesc getTableDesc(Class<? extends Deserializer> serdeClass,
+ String separatorCode, String columns, String columnTypes,
+ boolean lastColumnTakesRestOfTheLine) {
Properties properties = Utilities.makeProperties(
Constants.SERIALIZATION_FORMAT, separatorCode,
Constants.LIST_COLUMNS, columns);
@@ -96,10 +123,10 @@
"true");
}
return new tableDesc(
- LazySimpleSerDe.class,
- TextInputFormat.class,
- IgnoreKeyTextOutputFormat.class,
- properties);
+ serdeClass,
+ TextInputFormat.class,
+ IgnoreKeyTextOutputFormat.class,
+ properties);
}
/**
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input33.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input33.q?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input33.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input33.q Wed Aug 12 02:01:38 2009
@@ -0,0 +1,28 @@
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS (key STRING, value STRING);
+
+FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS (key STRING, value STRING);
+
+SELECT * FROM dest1 SORT BY key, value;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input34.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input34.q?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input34.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input34.q Wed Aug 12 02:01:38 2009
@@ -0,0 +1,20 @@
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+SELECT dest1.* FROM dest1;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input35.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input35.q?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input35.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input35.q Wed Aug 12 02:01:38 2009
@@ -0,0 +1,20 @@
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+SELECT dest1.* FROM dest1;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input36.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input36.q?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input36.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input36.q Wed Aug 12 02:01:38 2009
@@ -0,0 +1,20 @@
+CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+
+EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\003'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\003'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue;
+
+SELECT dest1.* FROM dest1;
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input15.q.out Wed Aug 12 02:01:38 2009
@@ -1,7 +1,7 @@
query: EXPLAIN
CREATE TABLE TEST15(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
- (TOK_CREATETABLE TEST15 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE)
+ (TOK_CREATETABLE TEST15 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\t'))) TOK_TBLTEXTFILE)
STAGE DEPENDENCIES:
Stage-0 is a root stage
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input33.q.out Wed Aug 12 02:01:38 2009
@@ -0,0 +1,421 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS (key STRING, value STRING)
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) key)) 'cat'))) (TOK_DISTRIBUTEBY (TOK_TABLE_OR_COL key)) (TOK_SORTBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)) (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL value))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL tmap) key) (. (TOK_TABLE_OR_COL tmap) value)) '../data/scripts/input20_script' (TOK_TABCOLLIST (TOK_TABCOL key TOK_STRING) (TOK_TABCOL value TOK_STRING)))))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ tmap:src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: key
+ type: string
+ outputColumnNames: _col0, _col1
+ Transform Operator
+ command: cat
+ output info:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ Reduce Output Operator
+ key expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ sort order: ++
+ Map-reduce partition columns:
+ expr: key
+ type: string
+ tag: -1
+ value expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ Select Operator
+ expressions:
+ expr: _col0
+ type: string
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ Transform Operator
+ command: ../data/scripts/input20_script
+ output info:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ Select Operator
+ expressions:
+ expr: UDFToInteger(key)
+ type: int
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+
+query: FROM (
+ FROM src
+ MAP src.key, src.key
+ USING 'cat'
+ DISTRIBUTE BY key
+ SORT BY key, value
+) tmap
+INSERT OVERWRITE TABLE dest1
+REDUCE tmap.key, tmap.value
+USING '../data/scripts/input20_script'
+AS (key STRING, value STRING)
+Input: default/src
+Output: default/dest1
+query: SELECT * FROM dest1 SORT BY key, value
+Input: default/dest1
+Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/410502456/10000
+1 105_105
+1 10_10
+1 111_111
+1 114_114
+1 116_116
+1 11_11
+1 126_126
+1 131_131
+1 133_133
+1 136_136
+1 143_143
+1 145_145
+1 150_150
+1 153_153
+1 155_155
+1 156_156
+1 157_157
+1 158_158
+1 160_160
+1 162_162
+1 163_163
+1 166_166
+1 168_168
+1 170_170
+1 177_177
+1 178_178
+1 17_17
+1 180_180
+1 181_181
+1 183_183
+1 186_186
+1 189_189
+1 190_190
+1 192_192
+1 194_194
+1 196_196
+1 19_19
+1 201_201
+1 202_202
+1 20_20
+1 214_214
+1 218_218
+1 222_222
+1 226_226
+1 228_228
+1 235_235
+1 241_241
+1 244_244
+1 247_247
+1 248_248
+1 249_249
+1 252_252
+1 257_257
+1 258_258
+1 260_260
+1 262_262
+1 263_263
+1 266_266
+1 274_274
+1 275_275
+1 27_27
+1 283_283
+1 284_284
+1 285_285
+1 286_286
+1 287_287
+1 289_289
+1 28_28
+1 291_291
+1 292_292
+1 296_296
+1 2_2
+1 302_302
+1 305_305
+1 306_306
+1 308_308
+1 30_30
+1 310_310
+1 315_315
+1 323_323
+1 332_332
+1 335_335
+1 336_336
+1 338_338
+1 339_339
+1 33_33
+1 341_341
+1 345_345
+1 34_34
+1 351_351
+1 356_356
+1 360_360
+1 362_362
+1 364_364
+1 365_365
+1 366_366
+1 368_368
+1 373_373
+1 374_374
+1 375_375
+1 377_377
+1 378_378
+1 379_379
+1 386_386
+1 389_389
+1 392_392
+1 393_393
+1 394_394
+1 400_400
+1 402_402
+1 407_407
+1 411_411
+1 418_418
+1 419_419
+1 41_41
+1 421_421
+1 427_427
+1 432_432
+1 435_435
+1 436_436
+1 437_437
+1 43_43
+1 443_443
+1 444_444
+1 446_446
+1 448_448
+1 449_449
+1 44_44
+1 452_452
+1 453_453
+1 455_455
+1 457_457
+1 460_460
+1 467_467
+1 470_470
+1 472_472
+1 475_475
+1 477_477
+1 479_479
+1 47_47
+1 481_481
+1 482_482
+1 483_483
+1 484_484
+1 485_485
+1 487_487
+1 490_490
+1 491_491
+1 493_493
+1 494_494
+1 495_495
+1 496_496
+1 497_497
+1 4_4
+1 53_53
+1 54_54
+1 57_57
+1 64_64
+1 65_65
+1 66_66
+1 69_69
+1 74_74
+1 77_77
+1 78_78
+1 80_80
+1 82_82
+1 85_85
+1 86_86
+1 87_87
+1 8_8
+1 92_92
+1 96_96
+1 9_9
+2 100_100
+2 103_103
+2 104_104
+2 113_113
+2 118_118
+2 120_120
+2 125_125
+2 129_129
+2 12_12
+2 134_134
+2 137_137
+2 146_146
+2 149_149
+2 152_152
+2 15_15
+2 164_164
+2 165_165
+2 172_172
+2 174_174
+2 175_175
+2 176_176
+2 179_179
+2 18_18
+2 191_191
+2 195_195
+2 197_197
+2 200_200
+2 203_203
+2 205_205
+2 207_207
+2 209_209
+2 213_213
+2 216_216
+2 217_217
+2 219_219
+2 221_221
+2 223_223
+2 224_224
+2 229_229
+2 233_233
+2 237_237
+2 238_238
+2 239_239
+2 242_242
+2 24_24
+2 255_255
+2 256_256
+2 265_265
+2 26_26
+2 272_272
+2 278_278
+2 280_280
+2 281_281
+2 282_282
+2 288_288
+2 307_307
+2 309_309
+2 317_317
+2 321_321
+2 322_322
+2 325_325
+2 331_331
+2 333_333
+2 342_342
+2 344_344
+2 353_353
+2 367_367
+2 37_37
+2 382_382
+2 395_395
+2 397_397
+2 399_399
+2 404_404
+2 413_413
+2 414_414
+2 424_424
+2 429_429
+2 42_42
+2 439_439
+2 458_458
+2 459_459
+2 462_462
+2 463_463
+2 478_478
+2 492_492
+2 51_51
+2 58_58
+2 67_67
+2 72_72
+2 76_76
+2 83_83
+2 84_84
+2 95_95
+2 97_97
+2 98_98
+3 0_0
+3 119_119
+3 128_128
+3 167_167
+3 187_187
+3 193_193
+3 199_199
+3 208_208
+3 273_273
+3 298_298
+3 311_311
+3 316_316
+3 318_318
+3 327_327
+3 35_35
+3 369_369
+3 384_384
+3 396_396
+3 403_403
+3 409_409
+3 417_417
+3 430_430
+3 431_431
+3 438_438
+3 454_454
+3 466_466
+3 480_480
+3 498_498
+3 5_5
+3 70_70
+3 90_90
+4 138_138
+4 169_169
+4 277_277
+4 406_406
+4 468_468
+4 489_489
+5 230_230
+5 348_348
+5 401_401
+5 469_469
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input34.q.out Wed Aug 12 02:01:38 2009
@@ -0,0 +1,614 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe')) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDENAME 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-4 depends on stages: Stage-1
+ Stage-0 depends on stages: Stage-4
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ tmap:src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Transform Operator
+ command: /bin/cat
+ output info:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ Select Operator
+ expressions:
+ expr: tkey
+ type: string
+ expr: tvalue
+ type: string
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-4
+ Conditional Operator
+ list of dependent Tasks:
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1093532122/10000
+ Map Reduce
+ Alias -> Map Operator Tree:
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/754709366/10002
+ Reduce Output Operator
+ sort order:
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/248092577/10000
+238 val_238
+86 val_86
+311 val_311
+27 val_27
+165 val_165
+409 val_409
+255 val_255
+278 val_278
+98 val_98
+484 val_484
+265 val_265
+193 val_193
+401 val_401
+150 val_150
+273 val_273
+224 val_224
+369 val_369
+66 val_66
+128 val_128
+213 val_213
+146 val_146
+406 val_406
+429 val_429
+374 val_374
+152 val_152
+469 val_469
+145 val_145
+495 val_495
+37 val_37
+327 val_327
+281 val_281
+277 val_277
+209 val_209
+15 val_15
+82 val_82
+403 val_403
+166 val_166
+417 val_417
+430 val_430
+252 val_252
+292 val_292
+219 val_219
+287 val_287
+153 val_153
+193 val_193
+338 val_338
+446 val_446
+459 val_459
+394 val_394
+237 val_237
+482 val_482
+174 val_174
+413 val_413
+494 val_494
+207 val_207
+199 val_199
+466 val_466
+208 val_208
+174 val_174
+399 val_399
+396 val_396
+247 val_247
+417 val_417
+489 val_489
+162 val_162
+377 val_377
+397 val_397
+309 val_309
+365 val_365
+266 val_266
+439 val_439
+342 val_342
+367 val_367
+325 val_325
+167 val_167
+195 val_195
+475 val_475
+17 val_17
+113 val_113
+155 val_155
+203 val_203
+339 val_339
+0 val_0
+455 val_455
+128 val_128
+311 val_311
+316 val_316
+57 val_57
+302 val_302
+205 val_205
+149 val_149
+438 val_438
+345 val_345
+129 val_129
+170 val_170
+20 val_20
+489 val_489
+157 val_157
+378 val_378
+221 val_221
+92 val_92
+111 val_111
+47 val_47
+72 val_72
+4 val_4
+280 val_280
+35 val_35
+427 val_427
+277 val_277
+208 val_208
+356 val_356
+399 val_399
+169 val_169
+382 val_382
+498 val_498
+125 val_125
+386 val_386
+437 val_437
+469 val_469
+192 val_192
+286 val_286
+187 val_187
+176 val_176
+54 val_54
+459 val_459
+51 val_51
+138 val_138
+103 val_103
+239 val_239
+213 val_213
+216 val_216
+430 val_430
+278 val_278
+176 val_176
+289 val_289
+221 val_221
+65 val_65
+318 val_318
+332 val_332
+311 val_311
+275 val_275
+137 val_137
+241 val_241
+83 val_83
+333 val_333
+180 val_180
+284 val_284
+12 val_12
+230 val_230
+181 val_181
+67 val_67
+260 val_260
+404 val_404
+384 val_384
+489 val_489
+353 val_353
+373 val_373
+272 val_272
+138 val_138
+217 val_217
+84 val_84
+348 val_348
+466 val_466
+58 val_58
+8 val_8
+411 val_411
+230 val_230
+208 val_208
+348 val_348
+24 val_24
+463 val_463
+431 val_431
+179 val_179
+172 val_172
+42 val_42
+129 val_129
+158 val_158
+119 val_119
+496 val_496
+0 val_0
+322 val_322
+197 val_197
+468 val_468
+393 val_393
+454 val_454
+100 val_100
+298 val_298
+199 val_199
+191 val_191
+418 val_418
+96 val_96
+26 val_26
+165 val_165
+327 val_327
+230 val_230
+205 val_205
+120 val_120
+131 val_131
+51 val_51
+404 val_404
+43 val_43
+436 val_436
+156 val_156
+469 val_469
+468 val_468
+308 val_308
+95 val_95
+196 val_196
+288 val_288
+481 val_481
+457 val_457
+98 val_98
+282 val_282
+197 val_197
+187 val_187
+318 val_318
+318 val_318
+409 val_409
+470 val_470
+137 val_137
+369 val_369
+316 val_316
+169 val_169
+413 val_413
+85 val_85
+77 val_77
+0 val_0
+490 val_490
+87 val_87
+364 val_364
+179 val_179
+118 val_118
+134 val_134
+395 val_395
+282 val_282
+138 val_138
+238 val_238
+419 val_419
+15 val_15
+118 val_118
+72 val_72
+90 val_90
+307 val_307
+19 val_19
+435 val_435
+10 val_10
+277 val_277
+273 val_273
+306 val_306
+224 val_224
+309 val_309
+389 val_389
+327 val_327
+242 val_242
+369 val_369
+392 val_392
+272 val_272
+331 val_331
+401 val_401
+242 val_242
+452 val_452
+177 val_177
+226 val_226
+5 val_5
+497 val_497
+402 val_402
+396 val_396
+317 val_317
+395 val_395
+58 val_58
+35 val_35
+336 val_336
+95 val_95
+11 val_11
+168 val_168
+34 val_34
+229 val_229
+233 val_233
+143 val_143
+472 val_472
+322 val_322
+498 val_498
+160 val_160
+195 val_195
+42 val_42
+321 val_321
+430 val_430
+119 val_119
+489 val_489
+458 val_458
+78 val_78
+76 val_76
+41 val_41
+223 val_223
+492 val_492
+149 val_149
+449 val_449
+218 val_218
+228 val_228
+138 val_138
+453 val_453
+30 val_30
+209 val_209
+64 val_64
+468 val_468
+76 val_76
+74 val_74
+342 val_342
+69 val_69
+230 val_230
+33 val_33
+368 val_368
+103 val_103
+296 val_296
+113 val_113
+216 val_216
+367 val_367
+344 val_344
+167 val_167
+274 val_274
+219 val_219
+239 val_239
+485 val_485
+116 val_116
+223 val_223
+256 val_256
+263 val_263
+70 val_70
+487 val_487
+480 val_480
+401 val_401
+288 val_288
+191 val_191
+5 val_5
+244 val_244
+438 val_438
+128 val_128
+467 val_467
+432 val_432
+202 val_202
+316 val_316
+229 val_229
+469 val_469
+463 val_463
+280 val_280
+2 val_2
+35 val_35
+283 val_283
+331 val_331
+235 val_235
+80 val_80
+44 val_44
+193 val_193
+321 val_321
+335 val_335
+104 val_104
+466 val_466
+366 val_366
+175 val_175
+403 val_403
+483 val_483
+53 val_53
+105 val_105
+257 val_257
+406 val_406
+409 val_409
+190 val_190
+406 val_406
+401 val_401
+114 val_114
+258 val_258
+90 val_90
+203 val_203
+262 val_262
+348 val_348
+424 val_424
+12 val_12
+396 val_396
+201 val_201
+217 val_217
+164 val_164
+431 val_431
+454 val_454
+478 val_478
+298 val_298
+125 val_125
+431 val_431
+164 val_164
+424 val_424
+187 val_187
+382 val_382
+5 val_5
+70 val_70
+397 val_397
+480 val_480
+291 val_291
+24 val_24
+351 val_351
+255 val_255
+104 val_104
+70 val_70
+163 val_163
+438 val_438
+119 val_119
+414 val_414
+200 val_200
+491 val_491
+237 val_237
+439 val_439
+360 val_360
+248 val_248
+479 val_479
+305 val_305
+417 val_417
+199 val_199
+444 val_444
+120 val_120
+429 val_429
+169 val_169
+443 val_443
+323 val_323
+325 val_325
+277 val_277
+230 val_230
+478 val_478
+178 val_178
+468 val_468
+310 val_310
+317 val_317
+333 val_333
+493 val_493
+460 val_460
+207 val_207
+249 val_249
+265 val_265
+480 val_480
+83 val_83
+136 val_136
+353 val_353
+172 val_172
+214 val_214
+462 val_462
+233 val_233
+406 val_406
+133 val_133
+175 val_175
+189 val_189
+454 val_454
+375 val_375
+401 val_401
+421 val_421
+407 val_407
+384 val_384
+256 val_256
+26 val_26
+134 val_134
+67 val_67
+384 val_384
+379 val_379
+18 val_18
+462 val_462
+492 val_492
+100 val_100
+298 val_298
+9 val_9
+341 val_341
+498 val_498
+146 val_146
+458 val_458
+362 val_362
+186 val_186
+285 val_285
+348 val_348
+167 val_167
+18 val_18
+273 val_273
+183 val_183
+281 val_281
+344 val_344
+97 val_97
+469 val_469
+315 val_315
+84 val_84
+28 val_28
+37 val_37
+448 val_448
+152 val_152
+348 val_348
+307 val_307
+194 val_194
+414 val_414
+477 val_477
+222 val_222
+126 val_126
+90 val_90
+169 val_169
+403 val_403
+400 val_400
+200 val_200
+97 val_97
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input35.q.out Wed Aug 12 02:01:38 2009
@@ -0,0 +1,614 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-4 depends on stages: Stage-1
+ Stage-0 depends on stages: Stage-4
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ tmap:src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Transform Operator
+ command: /bin/cat
+ output info:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ Select Operator
+ expressions:
+ expr: tkey
+ type: string
+ expr: tvalue
+ type: string
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-4
+ Conditional Operator
+ list of dependent Tasks:
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/525283863/10000
+ Map Reduce
+ Alias -> Map Operator Tree:
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/84195792/10002
+ Reduce Output Operator
+ sort order:
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1714970451/10000
+238 val_238
+86 val_86
+311 val_311
+27 val_27
+165 val_165
+409 val_409
+255 val_255
+278 val_278
+98 val_98
+484 val_484
+265 val_265
+193 val_193
+401 val_401
+150 val_150
+273 val_273
+224 val_224
+369 val_369
+66 val_66
+128 val_128
+213 val_213
+146 val_146
+406 val_406
+429 val_429
+374 val_374
+152 val_152
+469 val_469
+145 val_145
+495 val_495
+37 val_37
+327 val_327
+281 val_281
+277 val_277
+209 val_209
+15 val_15
+82 val_82
+403 val_403
+166 val_166
+417 val_417
+430 val_430
+252 val_252
+292 val_292
+219 val_219
+287 val_287
+153 val_153
+193 val_193
+338 val_338
+446 val_446
+459 val_459
+394 val_394
+237 val_237
+482 val_482
+174 val_174
+413 val_413
+494 val_494
+207 val_207
+199 val_199
+466 val_466
+208 val_208
+174 val_174
+399 val_399
+396 val_396
+247 val_247
+417 val_417
+489 val_489
+162 val_162
+377 val_377
+397 val_397
+309 val_309
+365 val_365
+266 val_266
+439 val_439
+342 val_342
+367 val_367
+325 val_325
+167 val_167
+195 val_195
+475 val_475
+17 val_17
+113 val_113
+155 val_155
+203 val_203
+339 val_339
+0 val_0
+455 val_455
+128 val_128
+311 val_311
+316 val_316
+57 val_57
+302 val_302
+205 val_205
+149 val_149
+438 val_438
+345 val_345
+129 val_129
+170 val_170
+20 val_20
+489 val_489
+157 val_157
+378 val_378
+221 val_221
+92 val_92
+111 val_111
+47 val_47
+72 val_72
+4 val_4
+280 val_280
+35 val_35
+427 val_427
+277 val_277
+208 val_208
+356 val_356
+399 val_399
+169 val_169
+382 val_382
+498 val_498
+125 val_125
+386 val_386
+437 val_437
+469 val_469
+192 val_192
+286 val_286
+187 val_187
+176 val_176
+54 val_54
+459 val_459
+51 val_51
+138 val_138
+103 val_103
+239 val_239
+213 val_213
+216 val_216
+430 val_430
+278 val_278
+176 val_176
+289 val_289
+221 val_221
+65 val_65
+318 val_318
+332 val_332
+311 val_311
+275 val_275
+137 val_137
+241 val_241
+83 val_83
+333 val_333
+180 val_180
+284 val_284
+12 val_12
+230 val_230
+181 val_181
+67 val_67
+260 val_260
+404 val_404
+384 val_384
+489 val_489
+353 val_353
+373 val_373
+272 val_272
+138 val_138
+217 val_217
+84 val_84
+348 val_348
+466 val_466
+58 val_58
+8 val_8
+411 val_411
+230 val_230
+208 val_208
+348 val_348
+24 val_24
+463 val_463
+431 val_431
+179 val_179
+172 val_172
+42 val_42
+129 val_129
+158 val_158
+119 val_119
+496 val_496
+0 val_0
+322 val_322
+197 val_197
+468 val_468
+393 val_393
+454 val_454
+100 val_100
+298 val_298
+199 val_199
+191 val_191
+418 val_418
+96 val_96
+26 val_26
+165 val_165
+327 val_327
+230 val_230
+205 val_205
+120 val_120
+131 val_131
+51 val_51
+404 val_404
+43 val_43
+436 val_436
+156 val_156
+469 val_469
+468 val_468
+308 val_308
+95 val_95
+196 val_196
+288 val_288
+481 val_481
+457 val_457
+98 val_98
+282 val_282
+197 val_197
+187 val_187
+318 val_318
+318 val_318
+409 val_409
+470 val_470
+137 val_137
+369 val_369
+316 val_316
+169 val_169
+413 val_413
+85 val_85
+77 val_77
+0 val_0
+490 val_490
+87 val_87
+364 val_364
+179 val_179
+118 val_118
+134 val_134
+395 val_395
+282 val_282
+138 val_138
+238 val_238
+419 val_419
+15 val_15
+118 val_118
+72 val_72
+90 val_90
+307 val_307
+19 val_19
+435 val_435
+10 val_10
+277 val_277
+273 val_273
+306 val_306
+224 val_224
+309 val_309
+389 val_389
+327 val_327
+242 val_242
+369 val_369
+392 val_392
+272 val_272
+331 val_331
+401 val_401
+242 val_242
+452 val_452
+177 val_177
+226 val_226
+5 val_5
+497 val_497
+402 val_402
+396 val_396
+317 val_317
+395 val_395
+58 val_58
+35 val_35
+336 val_336
+95 val_95
+11 val_11
+168 val_168
+34 val_34
+229 val_229
+233 val_233
+143 val_143
+472 val_472
+322 val_322
+498 val_498
+160 val_160
+195 val_195
+42 val_42
+321 val_321
+430 val_430
+119 val_119
+489 val_489
+458 val_458
+78 val_78
+76 val_76
+41 val_41
+223 val_223
+492 val_492
+149 val_149
+449 val_449
+218 val_218
+228 val_228
+138 val_138
+453 val_453
+30 val_30
+209 val_209
+64 val_64
+468 val_468
+76 val_76
+74 val_74
+342 val_342
+69 val_69
+230 val_230
+33 val_33
+368 val_368
+103 val_103
+296 val_296
+113 val_113
+216 val_216
+367 val_367
+344 val_344
+167 val_167
+274 val_274
+219 val_219
+239 val_239
+485 val_485
+116 val_116
+223 val_223
+256 val_256
+263 val_263
+70 val_70
+487 val_487
+480 val_480
+401 val_401
+288 val_288
+191 val_191
+5 val_5
+244 val_244
+438 val_438
+128 val_128
+467 val_467
+432 val_432
+202 val_202
+316 val_316
+229 val_229
+469 val_469
+463 val_463
+280 val_280
+2 val_2
+35 val_35
+283 val_283
+331 val_331
+235 val_235
+80 val_80
+44 val_44
+193 val_193
+321 val_321
+335 val_335
+104 val_104
+466 val_466
+366 val_366
+175 val_175
+403 val_403
+483 val_483
+53 val_53
+105 val_105
+257 val_257
+406 val_406
+409 val_409
+190 val_190
+406 val_406
+401 val_401
+114 val_114
+258 val_258
+90 val_90
+203 val_203
+262 val_262
+348 val_348
+424 val_424
+12 val_12
+396 val_396
+201 val_201
+217 val_217
+164 val_164
+431 val_431
+454 val_454
+478 val_478
+298 val_298
+125 val_125
+431 val_431
+164 val_164
+424 val_424
+187 val_187
+382 val_382
+5 val_5
+70 val_70
+397 val_397
+480 val_480
+291 val_291
+24 val_24
+351 val_351
+255 val_255
+104 val_104
+70 val_70
+163 val_163
+438 val_438
+119 val_119
+414 val_414
+200 val_200
+491 val_491
+237 val_237
+439 val_439
+360 val_360
+248 val_248
+479 val_479
+305 val_305
+417 val_417
+199 val_199
+444 val_444
+120 val_120
+429 val_429
+169 val_169
+443 val_443
+323 val_323
+325 val_325
+277 val_277
+230 val_230
+478 val_478
+178 val_178
+468 val_468
+310 val_310
+317 val_317
+333 val_333
+493 val_493
+460 val_460
+207 val_207
+249 val_249
+265 val_265
+480 val_480
+83 val_83
+136 val_136
+353 val_353
+172 val_172
+214 val_214
+462 val_462
+233 val_233
+406 val_406
+133 val_133
+175 val_175
+189 val_189
+454 val_454
+375 val_375
+401 val_401
+421 val_421
+407 val_407
+384 val_384
+256 val_256
+26 val_26
+134 val_134
+67 val_67
+384 val_384
+379 val_379
+18 val_18
+462 val_462
+492 val_492
+100 val_100
+298 val_298
+9 val_9
+341 val_341
+498 val_498
+146 val_146
+458 val_458
+362 val_362
+186 val_186
+285 val_285
+348 val_348
+167 val_167
+18 val_18
+273 val_273
+183 val_183
+281 val_281
+344 val_344
+97 val_97
+469 val_469
+315 val_315
+84 val_84
+28 val_28
+37 val_37
+448 val_448
+152 val_152
+348 val_348
+307 val_307
+194 val_194
+414 val_414
+477 val_477
+222 val_222
+126 val_126
+90 val_90
+169 val_169
+403 val_403
+400 val_400
+200 val_200
+97 val_97
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out?rev=803350&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input36.q.out Wed Aug 12 02:01:38 2009
@@ -0,0 +1,614 @@
+query: CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE
+query: EXPLAIN
+FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\003'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF src)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. (TOK_TABLE_OR_COL src) value)) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\002'))) '/bin/cat' (TOK_ALIASLIST tkey tvalue) (TOK_SERDE (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\003')))))))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB dest1)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL tkey)) (TOK_SELEXPR (TOK_TABLE_OR_COL tvalue)))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-4 depends on stages: Stage-1
+ Stage-0 depends on stages: Stage-4
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ tmap:src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ outputColumnNames: _col0, _col1
+ Transform Operator
+ command: /bin/cat
+ output info:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ Select Operator
+ expressions:
+ expr: tkey
+ type: string
+ expr: tvalue
+ type: string
+ outputColumnNames: _col0, _col1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(_col0)
+ type: int
+ expr: _col1
+ type: string
+ outputColumnNames: _col0, _col1
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-4
+ Conditional Operator
+ list of dependent Tasks:
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: file:/data/users/njain/hive3/hive3/build/ql/tmp/1966676963/10000
+ Map Reduce
+ Alias -> Map Operator Tree:
+ file:/data/users/njain/hive3/hive3/build/ql/tmp/1028977168/10002
+ Reduce Output Operator
+ sort order:
+ Map-reduce partition columns:
+ expr: rand()
+ type: double
+ tag: -1
+ value expressions:
+ expr: key
+ type: int
+ expr: value
+ type: string
+ Reduce Operator Tree:
+ Extract
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: dest1
+
+
+query: FROM (
+ FROM src
+ SELECT TRANSFORM(src.key, src.value) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\002'
+ USING '/bin/cat'
+ AS (tkey, tvalue) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\003'
+) tmap
+INSERT OVERWRITE TABLE dest1 SELECT tkey, tvalue
+Input: default/src
+Output: default/dest1
+query: SELECT dest1.* FROM dest1
+Input: default/dest1
+Output: file:/data/users/njain/hive3/hive3/build/ql/tmp/1073316238/10000
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
+NULL NULL
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out?rev=803350&r1=803349&r2=803350&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl3.q.out Wed Aug 12 02:01:38 2009
@@ -1,7 +1,7 @@
query: EXPLAIN
CREATE TABLE INPUTDDL3(key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' STORED AS TEXTFILE
ABSTRACT SYNTAX TREE:
- (TOK_CREATETABLE INPUTDDL3 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_TABLEROWFORMATFIELD '\t')) TOK_TBLTEXTFILE)
+ (TOK_CREATETABLE INPUTDDL3 TOK_LIKETABLE (TOK_TABCOLLIST (TOK_TABCOL key TOK_INT) (TOK_TABCOL value TOK_STRING)) (TOK_TABLEROWFORMAT (TOK_SERDEPROPS (TOK_TABLEROWFORMATFIELD '\t'))) TOK_TBLTEXTFILE)
STAGE DEPENDENCIES:
Stage-0 is a root stage