You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by he...@apache.org on 2010/02/28 20:59:33 UTC
svn commit: r917282 - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/parse/
ql/src/java/org/apache/hadoop/hive/ql/plan/
ql/src/test/org/apache/hadoop/hive/ql/exec/ ql/src/test/results/compile...
Author: heyongqiang
Date: Sun Feb 28 19:59:32 2010
New Revision: 917282
URL: http://svn.apache.org/viewvc?rev=917282&view=rev
Log:
HIVE-1204. typedbytes: writing to stderr kills the mapper. (namit via He Yongqiang)
Modified:
hadoop/hive/trunk/CHANGES.txt
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Sun Feb 28 19:59:32 2010
@@ -234,6 +234,9 @@
HIVE-1200. Fix CombineHiveInputFormat
(Zheng Shao via namit)
+ HIVE-1204. typedbytes: writing to stderr kills the mapper.
+ (namit via He Yongqiang)
+
Release 0.5.0 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java Sun Feb 28 19:59:32 2010
@@ -297,9 +297,9 @@
new OutputStreamProcessor(scriptOutputDeserializer
.getObjectInspector()), "OutputProcessor");
- RecordReader scriptErrReader = conf.getOutRecordReaderClass()
+ RecordReader scriptErrReader = conf.getErrRecordReaderClass()
.newInstance();
- scriptErrReader.initialize(scriptErr, hconf, conf.getScriptOutputInfo()
+ scriptErrReader.initialize(scriptErr, hconf, conf.getScriptErrInfo()
.getProperties());
errThread = new StreamThread(scriptErrReader, new ErrorStreamProcessor(
@@ -482,7 +482,7 @@
/**
* The processor for stderr stream.
- *
+ *
* TODO: In the future when we move to hadoop 0.18 and above, we should borrow
* the logic from HadoopStreaming: PipeMapRed.java MRErrorThread to support
* counters and status updates.
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Sun Feb 28 19:59:32 2010
@@ -1451,6 +1451,7 @@
}
TableDesc outInfo;
+ TableDesc errInfo;
TableDesc inInfo;
String defaultSerdeName = conf.getVar(HiveConf.ConfVars.HIVESCRIPTSERDE);
Class<? extends Deserializer> serde;
@@ -1487,17 +1488,21 @@
.toString(), defaultOutputCols);
}
+ // Error stream always uses the default serde with a single column
+ errInfo = PlanUtils.getTableDesc(serde, Integer.toString(Utilities.tabCode), "KEY");
+
// Output record readers
Class<? extends RecordReader> outRecordReader = getRecordReader((ASTNode) trfm
.getChild(outputRecordReaderNum));
Class<? extends RecordWriter> inRecordWriter = getRecordWriter((ASTNode) trfm
.getChild(inputRecordWriterNum));
+ Class<? extends RecordReader> errRecordReader = getDefaultRecordReader();
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ScriptDesc(
getFixedCmd(stripQuotes(trfm.getChild(execPos).getText())), inInfo,
- inRecordWriter, outInfo, outRecordReader), new RowSchema(out_rwsch
- .getColumnInfos()), input), out_rwsch);
+ inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
+ new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
return output;
}
@@ -1520,6 +1525,20 @@
}
}
+ private Class<? extends RecordReader> getDefaultRecordReader()
+ throws SemanticException {
+ String name;
+
+ name = conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER);
+
+ try {
+ return (Class<? extends RecordReader>) Class.forName(name, true,
+ JavaUtils.getClassLoader());
+ } catch (ClassNotFoundException e) {
+ throw new SemanticException(e);
+ }
+ }
+
private Class<? extends RecordWriter> getRecordWriter(ASTNode node)
throws SemanticException {
String name;
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Sun Feb 28 19:59:32 2010
@@ -39,19 +39,26 @@
private TableDesc scriptInputInfo;
private Class<? extends RecordReader> outRecordReaderClass;
+ private TableDesc scriptErrInfo;
+ private Class<? extends RecordReader> errRecordReaderClass;
+
public ScriptDesc() {
}
public ScriptDesc(final String scriptCmd, final TableDesc scriptInputInfo,
final Class<? extends RecordWriter> inRecordWriterClass,
final TableDesc scriptOutputInfo,
- final Class<? extends RecordReader> outRecordReaderClass) {
+ final Class<? extends RecordReader> outRecordReaderClass,
+ final Class<? extends RecordReader> errRecordReaderClass,
+ final TableDesc scriptErrInfo) {
this.scriptCmd = scriptCmd;
this.scriptInputInfo = scriptInputInfo;
this.inRecordWriterClass = inRecordWriterClass;
this.scriptOutputInfo = scriptOutputInfo;
this.outRecordReaderClass = outRecordReaderClass;
+ this.errRecordReaderClass = errRecordReaderClass;
+ this.scriptErrInfo = scriptErrInfo;
}
@Explain(displayName = "command")
@@ -72,6 +79,14 @@
this.scriptOutputInfo = scriptOutputInfo;
}
+ public TableDesc getScriptErrInfo() {
+ return scriptErrInfo;
+ }
+
+ public void setScriptErrInfo(final TableDesc scriptErrInfo) {
+ this.scriptErrInfo = scriptErrInfo;
+ }
+
public TableDesc getScriptInputInfo() {
return scriptInputInfo;
}
@@ -97,6 +112,22 @@
}
/**
+ * @return the errRecordReaderClass
+ */
+ public Class<? extends RecordReader> getErrRecordReaderClass() {
+ return errRecordReaderClass;
+ }
+
+ /**
+ * @param errRecordReaderClass
+ * the errRecordReaderClass to set
+ */
+ public void setErrRecordReaderClass(
+ Class<? extends RecordReader> errRecordReaderClass) {
+ this.errRecordReaderClass = errRecordReaderClass;
+ }
+
+ /**
* @return the inRecordWriterClass
*/
public Class<? extends RecordWriter> getInRecordWriterClass() {
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Sun Feb 28 19:59:32 2010
@@ -56,7 +56,7 @@
/**
* Mimics the actual query compiler in generating end to end plans and testing
* them out.
- *
+ *
*/
public class TestExecDriver extends TestCase {
@@ -204,7 +204,8 @@
Operator<ScriptDesc> op2 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
- + Utilities.tabCode, "key,value"), TextRecordReader.class), op3);
+ + Utilities.tabCode, "key,value"), TextRecordReader.class,
+ TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op3);
Operator<FilterDesc> op1 = OperatorFactory.get(getTestFilterDesc("key"),
op2);
@@ -331,7 +332,8 @@
Operator<ScriptDesc> op0 = OperatorFactory.get(new ScriptDesc("/bin/cat",
PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key,value"),
TextRecordWriter.class, PlanUtils.getDefaultTableDesc(""
- + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class), op1);
+ + Utilities.tabCode, "tkey,tvalue"), TextRecordReader.class,
+ TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op1);
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
@@ -406,7 +408,8 @@
"\'/bin/cat\'", PlanUtils.getDefaultTableDesc("" + Utilities.tabCode,
"tkey,tvalue"), TextRecordWriter.class, PlanUtils
.getDefaultTableDesc("" + Utilities.tabCode, "tkey,tvalue"),
- TextRecordReader.class), op1);
+ TextRecordReader.class,
+ TextRecordReader.class, PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key")), op1);
Operator<SelectDesc> op4 = OperatorFactory.get(new SelectDesc(Utilities
.makeList(getStringColumn("key"), getStringColumn("value")),
Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Sun Feb 28 19:59:32 2010
@@ -215,7 +215,9 @@
TableDesc scriptInput = PlanUtils.getDefaultTableDesc(""
+ Utilities.tabCode, "a,b");
ScriptDesc sd = new ScriptDesc("cat", scriptOutput,
- TextRecordWriter.class, scriptInput, TextRecordReader.class);
+ TextRecordWriter.class, scriptInput,
+ TextRecordReader.class, TextRecordReader.class,
+ PlanUtils.getDefaultTableDesc("" + Utilities.tabCode, "key"));
Operator<ScriptDesc> sop = OperatorFactory.getAndMakeChild(sd, op);
// Collect operator to observe the output of the script
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input20.q.xml Sun Feb 28 19:59:32 2010
@@ -66,7 +66,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455880</string>
+ <string>1267374964</string>
</void>
</object>
</void>
@@ -331,6 +331,9 @@
</void>
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.ScriptDesc">
+ <void property="errRecordReaderClass">
+ <class>org.apache.hadoop.hive.ql.exec.TextRecordReader</class>
+ </void>
<void property="inRecordWriterClass">
<class>org.apache.hadoop.hive.ql.exec.TextRecordWriter</class>
</void>
@@ -340,6 +343,35 @@
<void property="scriptCmd">
<string>cat</string>
</void>
+ <void property="scriptErrInfo">
+ <object class="org.apache.hadoop.hive.ql.plan.TableDesc">
+ <void property="deserializerClass">
+ <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
+ </void>
+ <void property="inputFileFormatClass">
+ <class>org.apache.hadoop.mapred.TextInputFormat</class>
+ </void>
+ <void property="outputFileFormatClass">
+ <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class>
+ </void>
+ <void property="properties">
+ <object class="java.util.Properties">
+ <void method="put">
+ <string>field.delim</string>
+ <string>9</string>
+ </void>
+ <void method="put">
+ <string>columns</string>
+ <string>KEY</string>
+ </void>
+ <void method="put">
+ <string>serialization.format</string>
+ <string>9</string>
+ </void>
+ </object>
+ </void>
+ </object>
+ </void>
<void property="scriptInputInfo">
<object class="org.apache.hadoop.hive.ql.plan.TableDesc">
<void property="deserializerClass">
@@ -790,7 +822,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455880</string>
+ <string>1267374964</string>
</void>
</object>
</void>
@@ -837,7 +869,7 @@
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.FileSinkDesc">
<void property="dirName">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-01_084_2871398099488603052/10001</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-05_651_2136775280486866599/10001</string>
</void>
<void property="numFiles">
<int>1</int>
@@ -936,6 +968,9 @@
</void>
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.ScriptDesc">
+ <void property="errRecordReaderClass">
+ <class>org.apache.hadoop.hive.ql.exec.TextRecordReader</class>
+ </void>
<void property="inRecordWriterClass">
<class>org.apache.hadoop.hive.ql.exec.TextRecordWriter</class>
</void>
@@ -945,6 +980,35 @@
<void property="scriptCmd">
<string>uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@"</string>
</void>
+ <void property="scriptErrInfo">
+ <object class="org.apache.hadoop.hive.ql.plan.TableDesc">
+ <void property="deserializerClass">
+ <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
+ </void>
+ <void property="inputFileFormatClass">
+ <class>org.apache.hadoop.mapred.TextInputFormat</class>
+ </void>
+ <void property="outputFileFormatClass">
+ <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class>
+ </void>
+ <void property="properties">
+ <object class="java.util.Properties">
+ <void method="put">
+ <string>field.delim</string>
+ <string>9</string>
+ </void>
+ <void method="put">
+ <string>columns</string>
+ <string>KEY</string>
+ </void>
+ <void method="put">
+ <string>serialization.format</string>
+ <string>9</string>
+ </void>
+ </object>
+ </void>
+ </object>
+ </void>
<void property="scriptInputInfo">
<object class="org.apache.hadoop.hive.ql.plan.TableDesc">
<void property="deserializerClass">
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input4.q.xml Sun Feb 28 19:59:32 2010
@@ -26,7 +26,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-07_733_959669286421354323/10000</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-11_936_6235612775515748937/10000</string>
</void>
<void property="table">
<object id="TableDesc0" class="org.apache.hadoop.hive.ql.plan.TableDesc">
@@ -83,7 +83,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455887</string>
+ <string>1267374971</string>
</void>
</object>
</void>
@@ -93,7 +93,7 @@
</object>
</void>
<void property="tmpDir">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-07_733_959669286421354323/10001</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-11_936_6235612775515748937/10001</string>
</void>
</object>
</void>
@@ -168,7 +168,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455886</string>
+ <string>1267374971</string>
</void>
</object>
</void>
@@ -433,6 +433,9 @@
</void>
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.ScriptDesc">
+ <void property="errRecordReaderClass">
+ <class>org.apache.hadoop.hive.ql.exec.TextRecordReader</class>
+ </void>
<void property="inRecordWriterClass">
<class>org.apache.hadoop.hive.ql.exec.TextRecordWriter</class>
</void>
@@ -442,6 +445,35 @@
<void property="scriptCmd">
<string>/bin/cat</string>
</void>
+ <void property="scriptErrInfo">
+ <object class="org.apache.hadoop.hive.ql.plan.TableDesc">
+ <void property="deserializerClass">
+ <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
+ </void>
+ <void property="inputFileFormatClass">
+ <class>org.apache.hadoop.mapred.TextInputFormat</class>
+ </void>
+ <void property="outputFileFormatClass">
+ <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class>
+ </void>
+ <void property="properties">
+ <object class="java.util.Properties">
+ <void method="put">
+ <string>field.delim</string>
+ <string>9</string>
+ </void>
+ <void method="put">
+ <string>columns</string>
+ <string>KEY</string>
+ </void>
+ <void method="put">
+ <string>serialization.format</string>
+ <string>9</string>
+ </void>
+ </object>
+ </void>
+ </object>
+ </void>
<void property="scriptInputInfo">
<object class="org.apache.hadoop.hive.ql.plan.TableDesc">
<void property="deserializerClass">
@@ -815,7 +847,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455886</string>
+ <string>1267374971</string>
</void>
</object>
</void>
@@ -865,7 +897,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-07_733_959669286421354323/10000</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-11_936_6235612775515748937/10000</string>
</void>
<void property="numFiles">
<int>1</int>
Modified: hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml?rev=917282&r1=917281&r2=917282&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml (original)
+++ hadoop/hive/trunk/ql/src/test/results/compiler/plan/input5.q.xml Sun Feb 28 19:59:32 2010
@@ -26,7 +26,7 @@
<boolean>true</boolean>
</void>
<void property="sourceDir">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-10_872_2056588897578274641/10000</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-14_735_3125261520058122794/10000</string>
</void>
<void property="table">
<object id="TableDesc0" class="org.apache.hadoop.hive.ql.plan.TableDesc">
@@ -83,7 +83,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455890</string>
+ <string>1267374974</string>
</void>
</object>
</void>
@@ -93,7 +93,7 @@
</object>
</void>
<void property="tmpDir">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-10_872_2056588897578274641/10001</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-14_735_3125261520058122794/10001</string>
</void>
</object>
</void>
@@ -172,7 +172,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455890</string>
+ <string>1267374974</string>
</void>
</object>
</void>
@@ -437,6 +437,9 @@
</void>
<void property="conf">
<object class="org.apache.hadoop.hive.ql.plan.ScriptDesc">
+ <void property="errRecordReaderClass">
+ <class>org.apache.hadoop.hive.ql.exec.TextRecordReader</class>
+ </void>
<void property="inRecordWriterClass">
<class>org.apache.hadoop.hive.ql.exec.TextRecordWriter</class>
</void>
@@ -446,6 +449,35 @@
<void property="scriptCmd">
<string>/bin/cat</string>
</void>
+ <void property="scriptErrInfo">
+ <object class="org.apache.hadoop.hive.ql.plan.TableDesc">
+ <void property="deserializerClass">
+ <class>org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe</class>
+ </void>
+ <void property="inputFileFormatClass">
+ <class>org.apache.hadoop.mapred.TextInputFormat</class>
+ </void>
+ <void property="outputFileFormatClass">
+ <class>org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat</class>
+ </void>
+ <void property="properties">
+ <object class="java.util.Properties">
+ <void method="put">
+ <string>field.delim</string>
+ <string>9</string>
+ </void>
+ <void method="put">
+ <string>columns</string>
+ <string>KEY</string>
+ </void>
+ <void method="put">
+ <string>serialization.format</string>
+ <string>9</string>
+ </void>
+ </object>
+ </void>
+ </object>
+ </void>
<void property="scriptInputInfo">
<object class="org.apache.hadoop.hive.ql.plan.TableDesc">
<void property="deserializerClass">
@@ -913,7 +945,7 @@
</void>
<void method="put">
<string>transient_lastDdlTime</string>
- <string>1266455890</string>
+ <string>1267374974</string>
</void>
</object>
</void>
@@ -959,7 +991,7 @@
<int>1</int>
</void>
<void property="dirName">
- <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-17_17-18-10_872_2056588897578274641/10000</string>
+ <string>file:/data/users/njain/hive1/hive1/build/ql/scratchdir/hive_2010-02-28_08-36-14_735_3125261520058122794/10000</string>
</void>
<void property="numFiles">
<int>1</int>