Posted to commits@hive.apache.org by at...@apache.org on 2009/01/21 20:03:47 UTC

svn commit: r736377 - in /hadoop/hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/org/apache/hadoop/hive/ql/exec/

Author: athusoo
Date: Wed Jan 21 11:03:46 2009
New Revision: 736377

URL: http://svn.apache.org/viewvc?rev=736377&view=rev
Log:
HIVE-217. Report progress in FileSinkOperator in order to avoid
Stream closed exceptions (Johan Oskarsson via athusoo)
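
The idea behind the patch: when an operator spends a long time writing output without otherwise reporting progress, the task can hit the MapReduce task timeout and be killed, after which its output streams are closed and later writes fail. Threading a Reporter through Operator.initialize() lets operators such as FileSinkOperator call progress() as a heartbeat. A minimal, self-contained sketch of the same pattern (hypothetical SlowSink class, illustrative only, not Hive code):

    import java.io.IOException;
    import java.io.Writer;

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.Reporter;

    // Hypothetical illustration of the heartbeat pattern this patch applies:
    // call Reporter.progress() during long-running I/O that emits nothing to
    // the output collector, so the framework does not declare the task dead.
    public class SlowSink {
      private final Reporter reporter;
      private final Writer out;

      public SlowSink(Reporter reporter, Writer out) {
        this.reporter = reporter;
        this.out = out;
      }

      public void write(Iterable<Text> records) throws IOException {
        for (Text record : records) {
          reporter.progress();            // heartbeat to the task tracker
          out.write(record.toString());
          out.write('\n');
        }
      }
    }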


Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Jan 21 11:03:46 2009
@@ -219,3 +219,6 @@
 
     HIVE-25. Enable Table aliases in cluster by, distribute by and sort
     by clauses (Prasad Chakka via athusoo)
+
+    HIVE-217. Report progress during FileSinkOperator in order to avoid
+    Stream closed exceptions (Johan Oskarsson via athusoo)

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CollectOperator.java Wed Jan 21 11:03:46 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -38,8 +39,8 @@
   transient protected ArrayList<ObjectInspector> rowInspectorList;
   transient int maxSize;
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     rowList = new ArrayList<Object> ();
     rowInspectorList = new ArrayList<ObjectInspector> ();
     maxSize = conf.getBufferSize().intValue();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecMapper.java Wed Jan 21 11:03:46 2009
@@ -54,7 +54,7 @@
       try {
         oc = output;
         mo.setOutputCollector(oc);
-        mo.initialize(jc);
+        mo.initialize(jc, reporter);
         rp = reporter;
       } catch (HiveException e) {
         abort = true;
@@ -81,7 +81,7 @@
     if(oc == null) {
       try {
         l4j.trace("Close called no row");
-        mo.initialize(jc);
+        mo.initialize(jc, null);
         rp = null;
       } catch (HiveException e) {
         abort = true;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java Wed Jan 21 11:03:46 2009
@@ -111,7 +111,7 @@
       try {
         oc = output;
         reducer.setOutputCollector(oc);
-        reducer.initialize(jc);
+        reducer.initialize(jc, reporter);
         rp = reporter;
       } catch (HiveException e) {
         abort = true;
@@ -176,7 +176,7 @@
     if(oc == null) {
       try {
         l4j.trace("Close called no row");
-        reducer.initialize(jc);
+        reducer.initialize(jc, null);
         rp = null;
       } catch (HiveException e) {
         abort = true;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExtractOperator.java Wed Jan 21 11:03:46 2009
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hive.ql.plan.extractDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -35,8 +36,8 @@
   transient protected ExprNodeEvaluator eval;
   transient protected InspectableObject result = new InspectableObject();
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     eval = ExprNodeEvaluatorFactory.get(conf.getCol());
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Wed Jan 21 11:03:46 2009
@@ -75,8 +75,9 @@
     }
   }
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
+    
     try {
       serializer = (Serializer)conf.getTableInfo().getDeserializerClass().newInstance();
       serializer.initialize(null, conf.getTableInfo().getProperties());
@@ -155,6 +156,7 @@
   Writable recordValue; 
   public void process(Object row, ObjectInspector rowInspector) throws HiveException {
     try {
+      reporter.progress();
       // user SerDe to serialize r, and write it out
       recordValue = serializer.serialize(row, rowInspector);
       outWriter.write(recordValue);
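
One detail worth noting: some call sites in this patch pass a null Reporter to initialize() (the no-row close paths in ExecMapper and ExecReducer, and parts of TestOperators below), while the reporter.progress() call added to process() above is unconditional; the implicit assumption is that rows only flow after the runtime has supplied a real Reporter. If a guard were ever wanted, a hypothetical helper (not part of this patch) could normalize the reference to the framework's no-op reporter:

    import org.apache.hadoop.mapred.Reporter;

    // Hypothetical helper, not in this patch: fall back to the no-op
    // Reporter.NULL so that progress() is always safe to call, even in
    // local or test runs where no task tracker is attached.
    public final class Reporters {
      private Reporters() {}

      public static Reporter orNoop(Reporter reporter) {
        return (reporter == null) ? Reporter.NULL : reporter;
      }
    }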

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FilterOperator.java Wed Jan 21 11:03:46 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Filter operator implementation
@@ -45,8 +46,8 @@
     conditionInspectableObject = new InspectableObject();
   }
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     try {
       this.conditionEvaluator = ExprNodeEvaluatorFactory.get(conf.getPredicate());
       statsMap.put(Counter.FILTERED, filtered_count);

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ForwardOperator.java Wed Jan 21 11:03:46 2009
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.forwardDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -31,8 +32,8 @@
  **/
 public class ForwardOperator extends  Operator<forwardDesc>  implements Serializable {
   private static final long serialVersionUID = 1L;
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     // nothing to do really ..
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Wed Jan 21 11:03:46 2009
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hive.ql.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.ql.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -112,8 +113,8 @@
   transient int           numEntriesVarSize;
   transient int           numEntriesHashTable;
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     totalMemory = Runtime.getRuntime().totalMemory();
 
     // init keyFields

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java Wed Jan 21 11:03:46 2009
@@ -39,6 +39,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Join operator implementation.
@@ -113,8 +114,9 @@
   HashMap<Byte, Vector<ArrayList<Object>>> storage;
   int joinEmitInterval = -1;
   
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
+    
     totalSz = 0;
     // Map that contains the rows for each alias
     storage = new HashMap<Byte, Vector<ArrayList<Object>>>();

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/LimitOperator.java Wed Jan 21 11:03:46 2009
@@ -26,6 +26,7 @@
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.plan.limitDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -38,8 +39,8 @@
   transient protected int limit;
   transient protected int currCount;
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     limit = conf.getLimit();
     currCount = 0;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapOperator.java Wed Jan 21 11:03:46 2009
@@ -23,6 +23,7 @@
 
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
@@ -61,8 +62,8 @@
   transient private List<ObjectInspector> partObjectInspectors;
   
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     Path fpath = new Path((new Path (HiveConf.getVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME))).toUri().getPath());
     ArrayList<Operator<? extends Serializable>> todo = new ArrayList<Operator<? extends Serializable>> ();
     statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count);
@@ -163,7 +164,7 @@
     this.setOutputCollector(out);
 
     for(Operator op: todo) {
-      op.initialize(hconf);
+      op.initialize(hconf, reporter);
     }
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Wed Jan 21 11:03:46 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Base operator implementation
@@ -48,6 +49,14 @@
   protected List<Operator<? extends Serializable>> parentOperators;
 
   public Operator() {}
+  
+  /**
+   * Create an operator with a reporter.
+   * @param reporter Used to report progress of certain operators.
+   */
+  public Operator(Reporter reporter) {
+    this.reporter = reporter;
+  }
 
   public void setChildOperators(List<Operator<? extends Serializable>> childOperators) {
     this.childOperators = childOperators;
@@ -130,6 +139,7 @@
   transient protected mapredWork gWork;
   transient protected String alias;
   transient protected String joinAlias;
+  transient protected Reporter reporter;
 
   public void setOutputCollector(OutputCollector out) {
     this.out = out;
@@ -198,15 +208,16 @@
     return(ret);
   }
 
-  public void initialize (Configuration hconf) throws HiveException {
+  public void initialize (Configuration hconf, Reporter reporter) throws HiveException {
     LOG.info("Initializing Self");
+    this.reporter = reporter;
     
     if(childOperators == null) {
       return;
     }
     LOG.info("Initializing children:");
     for(Operator<? extends Serializable> op: childOperators) {
-      op.initialize(hconf);
+      op.initialize(hconf, reporter);
     }    
     LOG.info("Initialization Done");
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java Wed Jan 21 11:03:46 2009
@@ -37,6 +37,7 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Reduce Sink Operator sends output to the reduce stage
@@ -70,8 +71,8 @@
   transient int tag;
   transient byte[] tagByte = new byte[1];
   
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     try {
       keyEval = new ExprNodeEvaluator[conf.getKeyCols().size()];
       int i=0;

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ScriptOperator.java Wed Jan 21 11:03:46 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hive.serde2.Serializer;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.LineRecordReader.LineReader;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.fs.FileUtil;
@@ -161,8 +162,8 @@
     }
   }
 
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     statsMap.put(Counter.DESERIALIZE_ERRORS, deserialize_error_count);
     statsMap.put(Counter.SERIALIZE_ERRORS, serialize_error_count);
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java Wed Jan 21 11:03:46 2009
@@ -28,6 +28,7 @@
 import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Select operator implementation
@@ -44,8 +45,8 @@
   
   boolean firstRow;
   
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     try {
       ArrayList<exprNodeDesc> colList = conf.getColList();
       eval = new ExprNodeEvaluator[colList.size()];

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java Wed Jan 21 11:03:46 2009
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.tableScanDesc;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -32,8 +33,8 @@
  **/
 public class TableScanOperator extends Operator<tableScanDesc> implements Serializable {
   private static final long serialVersionUID = 1L;
-  public void initialize(Configuration hconf) throws HiveException {
-    super.initialize(hconf);
+  public void initialize(Configuration hconf, Reporter reporter) throws HiveException {
+    super.initialize(hconf, reporter);
     // nothing to do really ..
   }
 

Modified: hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=736377&r1=736376&r2=736377&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hadoop/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Jan 21 11:03:46 2009
@@ -22,6 +22,7 @@
 import java.io.*;
 import java.util.*;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
@@ -85,7 +86,7 @@
       op.setConf(filterCtx);
 
       // runtime initialization
-      op.initialize(null);
+      op.initialize(null, null);
 
       for(InspectableObject oner: r) {
         op.process(oner.o, oner.oi);
@@ -140,7 +141,7 @@
       nextOp.add(flop);
 
       op.setChildOperators(nextOp);
-      op.initialize(new JobConf(TestOperators.class));
+      op.initialize(new JobConf(TestOperators.class), Reporter.NULL);
 
       // evaluate on row
       for(int i=0; i<5; i++) {
@@ -200,7 +201,7 @@
       sop.setChildOperators(nextCollectOp);
 
 
-      op.initialize(new JobConf(TestOperators.class));
+      op.initialize(new JobConf(TestOperators.class), null);
 
       // evaluate on row
       for(int i=0; i<5; i++) {
@@ -270,7 +271,7 @@
       // get map operator and initialize it
       MapOperator mo = new MapOperator();
       mo.setConf(mrwork);
-      mo.initialize(hconf);
+      mo.initialize(hconf, null);
 
       Text tw = new Text();
       InspectableObject io1 = new InspectableObject();