You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by zs...@apache.org on 2008/12/17 11:35:46 UTC
svn commit: r727337 [1/3] - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/metadata/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queri...
Author: zshao
Date: Wed Dec 17 02:35:44 2008
New Revision: 727337
URL: http://svn.apache.org/viewvc?rev=727337&view=rev
Log:
HIVE-104. Tables with at least 1 non-string column to use DynamicSerDe. (zshao)
Added:
hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_1.q
hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
hadoop/hive/trunk/ql/src/test/queries/clientpositive/cast1.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/input5.q
hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q
hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input20.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input9.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_part2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testsequencefile.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/input_testxpath2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/inputddl4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join14.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join17.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join18.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join8.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/join9.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/mapreduce6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/notable_alias2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/quote1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample1.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample4.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample5.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample6.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/sample7.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/subq2.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/udf3.q.out
hadoop/hive/trunk/ql/src/test/results/clientpositive/union2.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_index.q.out
hadoop/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hadoop/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/thrift/TCTLSeparatedProtocol.java
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Dec 17 02:35:44 2008
@@ -43,6 +43,9 @@
BUG FIXES
+ HIVE-104. Tables with at least 1 non-string columns to use DynamicSerDe.
+ (zshao)
+
HIVE-158. Make table aliases work for sampled tables in joins.
(Raghotham Murthy via zshao)
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Dec 17 02:35:44 2008
@@ -57,6 +57,7 @@
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde.thrift.columnsetSerDe;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
+import org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
@@ -396,7 +397,9 @@
.printInfo("Replacing columns for columnsetSerDe and changing to typed SerDe");
tbl.setSerializationLib(MetadataTypedColumnsetSerDe.class.getName());
} else if (!tbl.getSerializationLib().equals(
- MetadataTypedColumnsetSerDe.class.getName())) {
+ MetadataTypedColumnsetSerDe.class.getName())
+ && !tbl.getSerializationLib().equals(
+ DynamicSerDe.class.getName())) {
console
.printError("Replace columns is not supported for this table. SerDe may be incompatible.");
return 1;
@@ -524,19 +527,23 @@
}
/**
- * For now, if the user specifies either the map or the collections
- * delimiter, we infer the table to DynamicSerDe/TCTLSeparatedProtocol. In
- * the future, we should infer this for any delimiters specified, but this
- * will break older hive tables, so not for now.
+ * If the user didn't specify a SerDe, and any of the columns are not of type String,
+ * we will have to use DynamicSerDe instead.
*/
- if (crtTbl.getCollItemDelim() != null || crtTbl.getMapKeyDelim() != null) {
- tbl
- .setSerializationLib(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class
- .getName());
- tbl.setSerdeParam(
- org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT,
- org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class
- .getName());
+ if (crtTbl.getSerName() == null) {
+ boolean useDynamicSerDe = false;
+ if (crtTbl.getCols() != null) {
+ for (FieldSchema field: crtTbl.getCols()) {
+ if (!Constants.STRING_TYPE_NAME.equalsIgnoreCase(field.getType())) {
+ useDynamicSerDe = true;
+ }
+ }
+ }
+ if (useDynamicSerDe) {
+ LOG.info("Default to DynamicSerDe for table " + crtTbl.getTableName() );
+ tbl.setSerializationLib(org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe.class.getName());
+ tbl.setSerdeParam(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol.class.getName());
+ }
}
if (crtTbl.getComment() != null)
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Wed Dec 17 02:35:44 2008
@@ -203,6 +203,8 @@
}
tbl.checkValidity();
msc.createTable(tbl.getTTable());
+ } catch (HiveException e) {
+ throw e;
} catch (Exception e) {
throw new HiveException(e);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Dec 17 02:35:44 2008
@@ -1086,6 +1086,7 @@
RowResolver inputRR = opParseCtx.get(input).getRR();
boolean selectStar = false;
+ LOG.debug("genSelectPlan: input = " + inputRR.toString());
// Iterate over the selects
for (int i = 0; i < selExprList.getChildCount(); ++i) {
@@ -1203,6 +1204,13 @@
return r;
}
+ /**
+ * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
+ * The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
+ *
+ * @param mode The mode of the aggregation (PARTIAL1 or COMPLETE)
+ * @return the new GroupByOperator
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator(
QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo,
@@ -1271,6 +1279,13 @@
);
}
+ /**
+ * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
+ * The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
+ *
+ * @param mode The mode of the aggregation (PARTIAL2)
+ * @return the new GroupByOperator
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator1(
QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo,
@@ -1337,6 +1352,7 @@
String aggName = value.getChild(0).getText();
Class<? extends UDAF> aggClass = FunctionRegistry.getUDAF(aggName);
+ Method aggEvaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode);
assert (aggClass != null);
ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();
String text = entry.getKey();
@@ -1350,7 +1366,7 @@
aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI))));
groupByOutputRowResolver.put("", value.toStringTree(),
new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(),
- paraExprInfo.getType()));
+ aggEvaluateMethod.getReturnType()));
}
return putOpInsertMap(
@@ -1360,6 +1376,13 @@
groupByOutputRowResolver);
}
+ /**
+ * Generate the map-side GroupByOperator for the Query Block (qb.getParseInfo().getXXX(dest)).
+ * The new GroupByOperator will be a child of the inputOperatorInfo.
+ *
+ * @param mode The mode of the aggregation (HASH)
+ * @return the new GroupByOperator
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanMapGroupByOperator(QB qb, String dest, Operator inputOperatorInfo,
groupByDesc.Mode mode) throws SemanticException {
@@ -1472,6 +1495,19 @@
return newParameters;
}
+ /**
+ * Generate the ReduceSinkOperator for the Group By Query Block (parseInfo.getXXX(dest)).
+ * The new ReduceSinkOperator will be a child of inputOperatorInfo.
+ *
+ * It will put all Group By keys and the distinct field (if any) in the map-reduce sort key,
+ * and all other fields in the map-reduce value.
+ *
+ * The map-reduce partition key will be random() if there is no distinct, or the same as
+ * the map-reduce sort key otherwise.
+ *
+ * @return the new ReduceSinkOperator.
+ * @throws SemanticException
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanReduceSinkOperator(QBParseInfo parseInfo,
String dest, Operator inputOperatorInfo)
@@ -1539,6 +1575,18 @@
reduceSinkOutputRowResolver);
}
+ /**
+ * Generate the ReduceSinkOperator for the Group By Query Block (qb.getPartInfo().getXXX(dest)).
+ * The new ReduceSinkOperator will be a child of inputOperatorInfo.
+ *
+ * It will put all Group By keys and the distinct field (if any) in the map-reduce sort key,
+ * and all other fields in the map-reduce value.
+ *
+ * @param numPartitionFields the number of fields for map-reduce partitioning.
+ * This is usually the number of fields in the Group By keys.
+ * @return the new ReduceSinkOperator.
+ * @throws SemanticException
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanReduceSinkOperator(QB qb,
String dest, Operator inputOperatorInfo, int numPartitionFields) throws SemanticException {
@@ -1607,6 +1655,19 @@
);
}
+ /**
+ * Generate the second ReduceSinkOperator for the Group By Plan (parseInfo.getXXX(dest)).
+ * The new ReduceSinkOperator will be a child of groupByOperatorInfo.
+ *
+ * The second ReduceSinkOperator will put the group by keys in the map-reduce sort
+ * key, and put the partial aggregation results in the map-reduce value.
+ *
+ * @param numPartitionFields the number of fields in the map-reduce partition key.
+ * This should always be the same as the number of Group By keys. We should be
+ * able to remove this parameter since in this phase there is no distinct any more.
+ * @return the new ReduceSinkOperator.
+ * @throws SemanticException
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanReduceSinkOperator2MR(
QBParseInfo parseInfo, String dest, Operator groupByOperatorInfo, int numPartitionFields)
@@ -1651,6 +1712,15 @@
);
}
+ /**
+ * Generate the second GroupByOperator for the Group By Plan (parseInfo.getXXX(dest)).
+ * The new GroupByOperator will do the second aggregation based on the partial aggregation
+ * results.
+ *
+ * @param mode the mode of aggregation (FINAL)
+ * @return the new GroupByOperator
+ * @throws SemanticException
+ */
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator2MR(
QBParseInfo parseInfo, String dest, Operator reduceSinkOperatorInfo2, groupByDesc.Mode mode)
@@ -1681,6 +1751,7 @@
CommonTree value = entry.getValue();
String aggName = value.getChild(0).getText();
Class<? extends UDAF> aggClass = FunctionRegistry.getUDAF(aggName);
+ Method aggEvaluateMethod = FunctionRegistry.getUDAFEvaluateMethod(aggName, mode);
assert (aggClass != null);
ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();
String text = entry.getKey();
@@ -1694,7 +1765,7 @@
aggregations.add(new aggregationDesc(aggClass, aggParameters, ((mode == groupByDesc.Mode.FINAL) ? false : (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI))));
groupByOutputRowResolver2.put("", value.toStringTree(),
new ColumnInfo(Integer.valueOf(groupByKeys.size() + aggregations.size() - 1).toString(),
- paraExprInfo.getType()));
+ aggEvaluateMethod.getReturnType()));
}
return putOpInsertMap(
@@ -1994,9 +2065,8 @@
throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
qb.getParseInfo().getDestForClause(dest), reason));
}
- } else {
- expressions.add(column);
}
+ expressions.add(column);
}
}
@@ -3433,7 +3503,8 @@
desc = new exprNodeIndexDesc(t, children.get(0), children.get(1));
}
else {
- throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr));
+ throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
+ myt.getTypeName()));
}
} else {
// other operators or functions
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/groupByDesc.java Wed Dec 17 02:35:44 2008
@@ -55,6 +55,8 @@
return "partial2";
case HASH:
return "hash";
+ case FINAL:
+ return "final";
}
return "unknown";
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/cast1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/cast1.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/cast1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/cast1.q Wed Dec 17 02:35:44 2008
@@ -1,4 +1,4 @@
-CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 INT, c7 INT) STORED AS TEXTFILE;
+CREATE TABLE dest1(c1 INT, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 INT, c6 STRING, c7 INT) STORED AS TEXTFILE;
EXPLAIN
FROM src INSERT OVERWRITE TABLE dest1 SELECT 3 + 2, 3.0 + 2, 3 + 2.0, 3.0 + 2.0, 3 + CAST(2.0 AS INT) + CAST(CAST(0 AS SMALLINT) AS INT), CAST(1 AS BOOLEAN), CAST(TRUE AS INT) WHERE src.key = 86;
Added: hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_1.q?rev=727337&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_1.q (added)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_1.q Wed Dec 17 02:35:44 2008
@@ -0,0 +1,17 @@
+DROP TABLE table1;
+CREATE TABLE table1 (a STRING, b STRING) STORED AS TEXTFILE;
+DESCRIBE table1;
+DESCRIBE EXTENDED table1;
+
+DROP TABLE table2;
+CREATE TABLE table2 (a STRING, b INT) STORED AS TEXTFILE;
+DESCRIBE table2;
+DESCRIBE EXTENDED table2;
+
+DROP TABLE table3;
+CREATE TABLE table3 (a STRING, b STRING)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
+STORED AS TEXTFILE;
+DESCRIBE table3;
+DESCRIBE EXTENDED table3;
+
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3.q Wed Dec 17 02:35:44 2008
@@ -1,4 +1,4 @@
-CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT) STORED AS TEXTFILE;
+CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE;
EXPLAIN
FROM src
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/groupby3_map.q Wed Dec 17 02:35:44 2008
@@ -1,6 +1,6 @@
set hive.map.aggr=true;
-CREATE TABLE dest1(c1 INT, c2 INT, c3 INT, c4 INT, c5 INT) STORED AS TEXTFILE;
+CREATE TABLE dest1(c1 DOUBLE, c2 DOUBLE, c3 DOUBLE, c4 DOUBLE, c5 DOUBLE) STORED AS TEXTFILE;
EXPLAIN
FROM src
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input1.q Wed Dec 17 02:35:44 2008
@@ -1,4 +1,4 @@
-CREATE TABLE TEST1(A INT, B FLOAT) STORED AS TEXTFILE;
+CREATE TABLE TEST1(A INT, B DOUBLE) STORED AS TEXTFILE;
EXPLAIN
DESCRIBE TEST1;
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input2.q Wed Dec 17 02:35:44 2008
@@ -1,8 +1,8 @@
DROP TABLE TEST2a;
-CREATE TABLE TEST2a(A INT, B FLOAT) STORED AS TEXTFILE;
+CREATE TABLE TEST2a(A INT, B DOUBLE) STORED AS TEXTFILE;
DESCRIBE TEST2a;
DROP TABLE TEST2b;
-CREATE TABLE TEST2b(A ARRAY<INT>, B FLOAT, C MAP<FLOAT, INT>) STORED AS TEXTFILE;
+CREATE TABLE TEST2b(A ARRAY<INT>, B DOUBLE, C MAP<DOUBLE, INT>) STORED AS TEXTFILE;
DESCRIBE TEST2b;
SHOW TABLES;
DROP TABLE TEST2a;
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input3.q Wed Dec 17 02:35:44 2008
@@ -1,14 +1,14 @@
DROP TABLE TEST3a;
DROP TABLE TEST3b;
DROP TABLE TEST3c;
-CREATE TABLE TEST3a(A INT, B FLOAT) STORED AS TEXTFILE;
+CREATE TABLE TEST3a(A INT, B DOUBLE) STORED AS TEXTFILE;
DESCRIBE TEST3a;
-CREATE TABLE TEST3b(A ARRAY<INT>, B FLOAT, C MAP<FLOAT, INT>) STORED AS TEXTFILE;
+CREATE TABLE TEST3b(A ARRAY<INT>, B DOUBLE, C MAP<DOUBLE, INT>) STORED AS TEXTFILE;
DESCRIBE TEST3b;
SHOW TABLES;
EXPLAIN
-ALTER TABLE TEST3b ADD COLUMNS (X FLOAT);
-ALTER TABLE TEST3b ADD COLUMNS (X FLOAT);
+ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE);
+ALTER TABLE TEST3b ADD COLUMNS (X DOUBLE);
DESCRIBE TEST3b;
EXPLAIN
ALTER TABLE TEST3b RENAME TO TEST3c;
@@ -16,8 +16,8 @@
DESCRIBE TEST3c;
SHOW TABLES;
EXPLAIN
-ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
-ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE);
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 DOUBLE);
DESCRIBE EXTENDED TEST3c;
DROP TABLE TEST3c;
DROP TABLE TEST3a;
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/input5.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/input5.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/input5.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/input5.q Wed Dec 17 02:35:44 2008
@@ -1,4 +1,4 @@
-CREATE TABLE dest1(key INT, value STRING) STORED AS TEXTFILE;
+CREATE TABLE dest1(key STRING, value STRING) STORED AS TEXTFILE;
EXPLAIN
FROM (
Modified: hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q (original)
+++ hadoop/hive/trunk/ql/src/test/queries/clientpositive/inputddl4.q Wed Dec 17 02:35:44 2008
@@ -1,6 +1,6 @@
-- a simple test to test sorted/clustered syntax
DROP TABLE INPUTDDL4;
-CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT,
+CREATE TABLE INPUTDDL4(viewTime STRING, userid INT,
page_url STRING, referrer_url STRING,
friends ARRAY<BIGINT>, properties MAP<STRING, STRING>,
ip STRING COMMENT 'IP Address of the User')
Modified: hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_create_tbl1.q.out Wed Dec 17 02:35:44 2008
@@ -1,2 +1,2 @@
-FAILED: Error in metadata: org.apache.hadoop.hive.ql.metadata.HiveException: Partition collumn name aint conflicts with table columns.
+FAILED: Error in metadata: Partition collumn name aint conflicts with table columns.
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/case_sensitivity.q.out Wed Dec 17 02:35:44 2008
@@ -31,7 +31,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Stage: Stage-0
@@ -41,7 +41,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/cast1.q.out Wed Dec 17 02:35:44 2008
@@ -34,13 +34,29 @@
type: boolean
expr: UDFToInteger(true)
type: int
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: int
+ expr: 1
+ type: double
+ expr: 2
+ type: double
+ expr: 3
+ type: double
+ expr: 4
+ type: int
+ expr: UDFToString(5)
+ type: string
+ expr: 6
+ type: int
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -49,8 +65,8 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
-5 5.0 5.0 5.0 5 false 1
+5 5.0 5.0 5.0 5 FALSE 1
Added: hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out?rev=727337&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out (added)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/create_1.q.out Wed Dec 17 02:35:44 2008
@@ -0,0 +1,18 @@
+a string
+b string
+a string
+b string
+Detailed Table Information:
+Table(tableName:table1,dbName:default,owner:zshao,createTime:1228887062,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/sync/apache-trunk/build/ql/test/data/warehouse/table1,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=1}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+a string
+b int
+a string
+b int
+Detailed Table Information:
+Table(tableName:table2,dbName:default,owner:zshao,createTime:1228887063,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:int,comment:null)],location:file:/data/users/zshao/sync/apache-trunk/build/ql/test/data/warehouse/table2,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe,parameters:{serialization.format=org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol}),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
+a string
+b string
+a string
+b string
+Detailed Table Information:
+Table(tableName:table3,dbName:default,owner:zshao,createTime:1228887063,lastAccessTime:0,retention:0,sd:StorageDescriptor(cols:[FieldSchema(name:a,type:string,comment:null), FieldSchema(name:b,type:string,comment:null)],location:file:/data/users/zshao/sync/apache-trunk/build/ql/test/data/warehouse/table3,inputFormat:org.apache.hadoop.mapred.TextInputFormat,outputFormat:org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat,compressed:false,numBuckets:-1,serdeInfo:SerDeInfo(name:null,serializationLib:org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe,parameters:{serialization.format=9,field.delim= }),bucketCols:[],sortCols:[],parameters:{}),partitionKeys:[],parameters:{})
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1.q.out Wed Dec 17 02:35:44 2008
@@ -41,7 +41,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/69808444/374659791.10001
+ /tmp/hive-zshao/67494501/106593589.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -61,20 +61,26 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest_g1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest_g1
Stage: Stage-0
Move Operator
@@ -83,7 +89,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest_g1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_limit.q.out Wed Dec 17 02:35:44 2008
@@ -42,7 +42,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/786365372/1698354371.10001
+ /tmp/hive-zshao/618493432/49810635.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -62,7 +62,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
@@ -80,7 +80,7 @@
Stage: Stage-3
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/786365372/1698354371.10002
+ /tmp/hive-zshao/618493432/49810635.10002
Reduce Output Operator
sort order:
tag: -1
@@ -93,13 +93,19 @@
Reduce Operator Tree:
Extract
Limit
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -108,7 +114,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby1_map.q.out Wed Dec 17 02:35:44 2008
@@ -48,7 +48,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/176699107/318789303.10001
+ /tmp/hive-zshao/632752964/335784834.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -68,20 +68,26 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: double
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -90,7 +96,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2.q.out Wed Dec 17 02:35:44 2008
@@ -43,7 +43,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/190784876/1532193204.10001
+ /tmp/hive-zshao/75837910/35360202.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -66,7 +66,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
@@ -75,13 +75,21 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest_g2
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest_g2
Stage: Stage-0
Move Operator
@@ -90,7 +98,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest_g2
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_limit.q.out Wed Dec 17 02:35:44 2008
@@ -41,7 +41,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/426666771/995404034.10002
+ /tmp/hive-zshao/42329253/238260652.10002
Reduce Output Operator
key expressions:
expr: 0
@@ -61,7 +61,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby2_map.q.out Wed Dec 17 02:35:44 2008
@@ -58,7 +58,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/729182472/421148418.10001
+ /tmp/hive-zshao/107370008/1090440963.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -81,7 +81,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
@@ -90,13 +90,21 @@
type: bigint
expr: concat(0, UDFToString(2))
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: 0
+ type: string
+ expr: UDFToInteger(1)
+ type: int
+ expr: 2
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -105,7 +113,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3.q.out Wed Dec 17 02:35:44 2008
@@ -43,7 +43,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/793012867/112314829.10001
+ /tmp/hive-zshao/695280947/659390410.10001
Reduce Output Operator
sort order:
tag: -1
@@ -66,15 +66,15 @@
expr: avg(VALUE.2)
expr: min(VALUE.3)
expr: max(VALUE.4)
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 1
type: double
expr: 2
- type: string
+ type: double
expr: 0
- type: string
+ type: double
expr: 4
type: double
expr: 3
@@ -84,7 +84,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Stage: Stage-0
@@ -94,7 +94,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby3_map.q.out Wed Dec 17 02:35:44 2008
@@ -65,7 +65,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/16311816/1248542091.10001
+ /tmp/hive-zshao/151053238/135596442.10001
Reduce Output Operator
sort order:
tag: -1
@@ -88,15 +88,15 @@
expr: avg(VALUE.2)
expr: min(VALUE.3)
expr: max(VALUE.4)
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 1
type: double
expr: 2
- type: string
+ type: double
expr: 0
- type: string
+ type: double
expr: 4
type: double
expr: 3
@@ -106,7 +106,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Stage: Stage-0
@@ -116,7 +116,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4.q.out Wed Dec 17 02:35:44 2008
@@ -40,7 +40,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/1453689426/82458415.10001
+ /tmp/hive-zshao/1491006708/287075280.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -55,7 +55,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby4_map.q.out Wed Dec 17 02:35:44 2008
@@ -28,18 +28,22 @@
Group By Operator
aggregations:
expr: count(VALUE.0)
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
type: bigint
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -48,7 +52,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5.q.out Wed Dec 17 02:35:44 2008
@@ -41,7 +41,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/46132837/471661111.10001
+ /tmp/hive-zshao/350108858/95672649.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -61,20 +61,26 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
type: string
expr: 1
type: double
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: UDFToString(1)
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -83,7 +89,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby5_map.q.out Wed Dec 17 02:35:44 2008
@@ -31,18 +31,22 @@
Group By Operator
aggregations:
expr: sum(VALUE.0)
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
type: double
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -51,8 +55,8 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
-130091.0
+130091
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/groupby6.q.out Wed Dec 17 02:35:44 2008
@@ -40,7 +40,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/486205309/55090089.10001
+ /tmp/hive-zshao/454201677/52450507.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -55,7 +55,7 @@
keys:
expr: KEY.0
type: string
- mode: unknown
+ mode: final
Select Operator
expressions:
expr: 0
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input1.q.out Wed Dec 17 02:35:44 2008
@@ -12,4 +12,4 @@
a int
-b float
+b double
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input11.q.out Wed Dec 17 02:35:44 2008
@@ -20,13 +20,19 @@
type: string
expr: value
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -35,7 +41,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input11_limit.q.out Wed Dec 17 02:35:44 2008
@@ -33,13 +33,19 @@
Reduce Operator Tree:
Extract
Limit
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -48,7 +54,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input12.q.out Wed Dec 17 02:35:44 2008
@@ -20,13 +20,19 @@
type: string
expr: value
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Filter Operator
predicate:
expr: ((key >= 100) and (key < 200))
@@ -37,13 +43,19 @@
type: string
expr: value
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest2
Filter Operator
predicate:
expr: (key >= 200)
@@ -52,13 +64,17 @@
expressions:
expr: key
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest3
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest3
Stage: Stage-0
Move Operator
@@ -67,13 +83,13 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
replace: true
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest2
partition:
ds 2008-04-08
@@ -82,7 +98,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest3
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input13.q.out Wed Dec 17 02:35:44 2008
@@ -20,13 +20,19 @@
type: string
expr: value
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Filter Operator
predicate:
expr: ((key >= 100) and (key < 200))
@@ -37,13 +43,19 @@
type: string
expr: value
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest2
Filter Operator
predicate:
expr: ((key >= 200) and (key < 300))
@@ -52,13 +64,17 @@
expressions:
expr: key
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest3
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest3
Filter Operator
predicate:
expr: (key >= 300)
@@ -83,13 +99,13 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
replace: true
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest2
partition:
ds 2008-04-08
@@ -98,7 +114,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest3
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14.q.out Wed Dec 17 02:35:44 2008
@@ -47,13 +47,19 @@
type: string
expr: 1
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -62,7 +68,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input14_limit.q.out Wed Dec 17 02:35:44 2008
@@ -49,7 +49,7 @@
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/606891034/24901173.10001
+ /tmp/hive-zshao/2396737/195622561.10001
Reduce Output Operator
key expressions:
expr: 0
@@ -78,13 +78,19 @@
type: string
expr: 1
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -93,7 +99,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input17.q.out Wed Dec 17 02:35:44 2008
@@ -51,13 +51,19 @@
type: string
expr: 1
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -66,7 +72,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input18.q.out Wed Dec 17 02:35:44 2008
@@ -51,13 +51,19 @@
type: string
expr: regexp_replace(1, ' ', '+')
type: string
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -66,7 +72,7 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input19.q.out Wed Dec 17 02:35:44 2008
@@ -1 +1 @@
-127.0.0.1 frank 10/Oct/2000:13:55:36 -0700 GET /apache_pb.gif HTTP/1.0 200 2326
+127.0.0.1 NULL frank 10/Oct/2000:13:55:36 -0700 GET /apache_pb.gif HTTP/1.0 200 2326
Modified: hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out?rev=727337&r1=727336&r2=727337&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out (original)
+++ hadoop/hive/trunk/ql/src/test/results/clientpositive/input1_limit.q.out Wed Dec 17 02:35:44 2008
@@ -52,13 +52,19 @@
Reduce Operator Tree:
Extract
Limit
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest1
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest1
Stage: Stage-0
Move Operator
@@ -67,19 +73,19 @@
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest1
replace: true
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
name: dest2
Stage: Stage-2
Map Reduce
Alias -> Map Operator Tree:
- /tmp/hive-jssarma/891215760/586361272.10002
+ /tmp/hive-zshao/196681773/625336699.10002
Reduce Output Operator
sort order:
tag: -1
@@ -92,13 +98,19 @@
Reduce Operator Tree:
Extract
Limit
- File Output Operator
- compressed: false
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
- name: dest2
+ Select Operator
+ expressions:
+ expr: UDFToInteger(0)
+ type: int
+ expr: 1
+ type: string
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.dynamic_type.DynamicSerDe
+ name: dest2
86 val_86