Posted to commits@hive.apache.org by na...@apache.org on 2012/08/29 19:44:02 UTC

svn commit: r1378659 [4/4] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/ java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/index/compact/ java/org/apache/hadoop/hive/ql/io/ java/org/apache/hadoop/hive/ql/lib/ java/org/apach...

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java Wed Aug 29 17:43:59 2012
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.udf.gen
  *
  */
 @Explain(displayName = "Group By Operator")
-public class GroupByDesc implements java.io.Serializable {
+public class GroupByDesc extends AbstractOperatorDesc {
   /**
    * Group-by Mode: COMPLETE: complete 1-phase aggregation: iterate, terminate
    * PARTIAL1: partial aggregation - first phase: iterate, terminatePartial
@@ -54,9 +54,9 @@ public class GroupByDesc implements java
   private boolean groupKeyNotReductionKey;
   private boolean bucketGroup;
 
-  private java.util.ArrayList<ExprNodeDesc> keys;
-  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators;
-  private java.util.ArrayList<java.lang.String> outputColumnNames;
+  private ArrayList<ExprNodeDesc> keys;
+  private ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators;
+  private ArrayList<java.lang.String> outputColumnNames;
   private float groupByMemoryUsage;
   private float memoryThreshold;
 
@@ -65,9 +65,9 @@ public class GroupByDesc implements java
 
   public GroupByDesc(
       final Mode mode,
-      final java.util.ArrayList<java.lang.String> outputColumnNames,
-      final java.util.ArrayList<ExprNodeDesc> keys,
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
+      final ArrayList<java.lang.String> outputColumnNames,
+      final ArrayList<ExprNodeDesc> keys,
+      final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
       final boolean groupKeyNotReductionKey,float groupByMemoryUsage, float memoryThreshold) {
     this(mode, outputColumnNames, keys, aggregators, groupKeyNotReductionKey,
         false, groupByMemoryUsage, memoryThreshold);
@@ -75,9 +75,9 @@ public class GroupByDesc implements java
 
   public GroupByDesc(
       final Mode mode,
-      final java.util.ArrayList<java.lang.String> outputColumnNames,
-      final java.util.ArrayList<ExprNodeDesc> keys,
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
+      final ArrayList<java.lang.String> outputColumnNames,
+      final ArrayList<ExprNodeDesc> keys,
+      final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators,
       final boolean groupKeyNotReductionKey, final boolean bucketGroup,float groupByMemoryUsage, float memoryThreshold) {
     this.mode = mode;
     this.outputColumnNames = outputColumnNames;
@@ -120,21 +120,21 @@ public class GroupByDesc implements java
   }
 
   @Explain(displayName = "keys")
-  public java.util.ArrayList<ExprNodeDesc> getKeys() {
+  public ArrayList<ExprNodeDesc> getKeys() {
     return keys;
   }
 
-  public void setKeys(final java.util.ArrayList<ExprNodeDesc> keys) {
+  public void setKeys(final ArrayList<ExprNodeDesc> keys) {
     this.keys = keys;
   }
 
   @Explain(displayName = "outputColumnNames")
-  public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
+  public ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
 
   public void setOutputColumnNames(
-      java.util.ArrayList<java.lang.String> outputColumnNames) {
+      ArrayList<java.lang.String> outputColumnNames) {
     this.outputColumnNames = outputColumnNames;
   }
 
@@ -155,12 +155,12 @@ public class GroupByDesc implements java
   }
 
   @Explain(displayName = "aggregations")
-  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> getAggregators() {
+  public ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> getAggregators() {
     return aggregators;
   }
 
   public void setAggregators(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators) {
+      final ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> aggregators) {
     this.aggregators = aggregators;
   }
 
@@ -180,7 +180,7 @@ public class GroupByDesc implements java
   public void setBucketGroup(boolean dataSorted) {
     bucketGroup = dataSorted;
   }
-  
+
   /**
    * Checks if this grouping is like distinct, which means that all non-distinct grouping
    * columns behave like they were distinct - for example min and max operators.
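
A note on the pattern: this hunk is the template for the rest of the commit. Each descriptor drops "implements java.io.Serializable" in favor of "extends AbstractOperatorDesc", and fully-qualified java.util.ArrayList references collapse to the plain imported name. GroupByDesc itself gains no clone() here; if one were wanted, the field-wise style used by SelectDesc and ReduceSinkDesc later in this diff suggests a shape like the following sketch, which is not part of r1378659 and uses only the constructor visible above:

    // Sketch only -- GroupByDesc has no clone() in this commit.
    @Override
    public Object clone() {
      ArrayList<ExprNodeDesc> keysClone = new ArrayList<ExprNodeDesc>(keys);
      ArrayList<AggregationDesc> aggregatorsClone =
        new ArrayList<AggregationDesc>(aggregators);
      ArrayList<String> outputsClone = new ArrayList<String>(outputColumnNames);
      return new GroupByDesc(mode, outputsClone, keysClone, aggregatorsClone,
        groupKeyNotReductionKey, bucketGroup, groupByMemoryUsage, memoryThreshold);
    }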

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HashTableDummyDesc.java Wed Aug 29 17:43:59 2012
@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 /**
  * HashTable Dummy Descriptor implementation.
  *
  */
 @Explain(displayName = "HashTable Dummy Operator")
-public class HashTableDummyDesc implements Serializable {
+public class HashTableDummyDesc extends AbstractOperatorDesc {
   private TableDesc tbl;
 
   public TableDesc getTbl() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java Wed Aug 29 17:43:59 2012
@@ -18,19 +18,20 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+
 /**
  * Join operator Descriptor implementation.
  *
  */
 @Explain(displayName = "Join Operator")
-public class JoinDesc implements Serializable {
+public class JoinDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   public static final int INNER_JOIN = 0;
   public static final int LEFT_OUTER_JOIN = 1;
@@ -87,6 +88,53 @@ public class JoinDesc implements Seriali
     }
   }
 
+  @Override
+  public Object clone() {
+    JoinDesc ret = new JoinDesc();
+    Map<Byte,List<ExprNodeDesc>> cloneExprs = new HashMap<Byte,List<ExprNodeDesc>>();
+    cloneExprs.putAll(getExprs());
+    ret.setExprs(cloneExprs);
+    Map<Byte,List<ExprNodeDesc>> cloneFilters = new HashMap<Byte,List<ExprNodeDesc>>();
+    cloneFilters.putAll(getFilters());
+    ret.setFilters(cloneFilters);
+    ret.setConds(getConds().clone());
+    ret.setNoOuterJoin(getNoOuterJoin());
+    ret.setNullSafes(getNullSafes());
+    ret.setHandleSkewJoin(handleSkewJoin);
+    ret.setSkewKeyDefinition(getSkewKeyDefinition());
+    ret.setTagOrder(getTagOrder().clone());
+    if (getKeyTableDesc() != null) {
+      ret.setKeyTableDesc((TableDesc) getKeyTableDesc().clone());
+    }
+
+    if (getBigKeysDirMap() != null) {
+      Map<Byte, String> cloneBigKeysDirMap = new HashMap<Byte, String>();
+      cloneBigKeysDirMap.putAll(getBigKeysDirMap());
+      ret.setBigKeysDirMap(cloneBigKeysDirMap);
+    }
+    if (getSmallKeysDirMap() != null) {
+      Map<Byte, Map<Byte, String>> cloneSmallKeysDirMap = new HashMap<Byte, Map<Byte,String>> ();
+      cloneSmallKeysDirMap.putAll(getSmallKeysDirMap());
+      ret.setSmallKeysDirMap(cloneSmallKeysDirMap);
+    }
+    if (getSkewKeysValuesTables() != null) {
+      Map<Byte, TableDesc> cloneSkewKeysValuesTables = new HashMap<Byte, TableDesc>();
+      cloneSkewKeysValuesTables.putAll(getSkewKeysValuesTables());
+      ret.setSkewKeysValuesTables(cloneSkewKeysValuesTables);
+    }
+    if (getOutputColumnNames() != null) {
+      List<String> cloneOutputColumnNames = new ArrayList<String>();
+      cloneOutputColumnNames.addAll(getOutputColumnNames());
+      ret.setOutputColumnNames(cloneOutputColumnNames);
+    }
+    if (getReversedExprs() != null) {
+      Map<String, Byte> cloneReversedExprs = new HashMap<String, Byte>();
+      cloneReversedExprs.putAll(getReversedExprs());
+      ret.setReversedExprs(cloneReversedExprs);
+    }
+    return ret;
+  }
+
   public JoinDesc(final Map<Byte, List<ExprNodeDesc>> exprs,
       List<String> outputColumnNames, final boolean noOuterJoin,
       final JoinCondDesc[] conds) {
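
Note the depth of the copies in the clone() above: putAll gives the new JoinDesc fresh HashMap instances, but the List<ExprNodeDesc> values (and the TableDesc values in the skew maps) are still shared with the original. That is enough as long as callers treat those lists as read-only. A one-level-deeper copy, should full isolation ever be needed, could look like this hypothetical helper, which is not part of the commit:

    // Hypothetical: also copies each value list, so mutating a cloned
    // expression list cannot leak back into the original JoinDesc.
    private static Map<Byte, List<ExprNodeDesc>> deepCopyExprMap(
        Map<Byte, List<ExprNodeDesc>> src) {
      Map<Byte, List<ExprNodeDesc>> dst = new HashMap<Byte, List<ExprNodeDesc>>();
      for (Map.Entry<Byte, List<ExprNodeDesc>> e : src.entrySet()) {
        dst.put(e.getKey(), new ArrayList<ExprNodeDesc>(e.getValue()));
      }
      return dst;
    }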

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewForwardDesc.java Wed Aug 29 17:43:59 2012
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 
 /**
  * LateralViewForwardDesc.
  *
  */
 @Explain(displayName = "Lateral View Forward")
-public class LateralViewForwardDesc implements Serializable {
+public class LateralViewForwardDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
   public LateralViewForwardDesc() {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java Wed Aug 29 17:43:59 2012
@@ -18,15 +18,15 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 
+
 /**
  * LateralViewJoinDesc.
  *
  */
 @Explain(displayName = "Lateral View Join Operator")
-public class LateralViewJoinDesc implements Serializable {
+public class LateralViewJoinDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
   private ArrayList<String> outputInternalColNames;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java Wed Aug 29 17:43:59 2012
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 
 /**
  * LimitDesc.
  *
  */
 @Explain(displayName = "Limit")
-public class LimitDesc implements Serializable {
+public class LimitDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private int limit;
   private int leastRows = -1;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ListSinkDesc.java Wed Aug 29 17:43:59 2012
@@ -18,13 +18,12 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 
 /**
  * description for ListSinkOperator, just for explain result.
  */
 @Explain(displayName = "ListSink")
-public class ListSinkDesc implements Serializable {
+public class ListSinkDesc extends AbstractOperatorDesc {
 
   private static final long serialVersionUID = 1L;
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java Wed Aug 29 17:43:59 2012
@@ -33,22 +33,22 @@ import org.apache.hadoop.hive.ql.exec.Op
 public class MapredLocalWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
-  private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
+  private LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork;
   private LinkedHashMap<String, FetchWork> aliasToFetchWork;
   private boolean inputFileChangeSensitive;
   private BucketMapJoinContext bucketMapjoinContext;
   private String tmpFileURI;
   private String stageID;
 
-  private List<Operator<? extends Serializable>> dummyParentOp ;
+  private List<Operator<? extends OperatorDesc>> dummyParentOp ;
 
   public MapredLocalWork() {
 
   }
 
   public MapredLocalWork(
-      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
-      final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
+    final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork,
+    final LinkedHashMap<String, FetchWork> aliasToFetchWork) {
     this.aliasToWork = aliasToWork;
     this.aliasToFetchWork = aliasToFetchWork;
 
@@ -61,18 +61,18 @@ public class MapredLocalWork implements 
   }
 
 
-  public void setDummyParentOp(List<Operator<? extends Serializable>> op){
+  public void setDummyParentOp(List<Operator<? extends OperatorDesc>> op){
     this.dummyParentOp=op;
   }
 
 
-  public List<Operator<? extends Serializable>> getDummyParentOp(){
+  public List<Operator<? extends OperatorDesc>> getDummyParentOp(){
     return this.dummyParentOp;
   }
 
 
   @Explain(displayName = "Alias -> Map Local Operator Tree")
-  public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
+  public LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork() {
     return aliasToWork;
   }
 
@@ -85,7 +85,7 @@ public class MapredLocalWork implements 
   }
 
   public void setAliasToWork(
-      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
+    final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork) {
     this.aliasToWork = aliasToWork;
   }
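
The changed bound in these signatures is the point of the whole commit: Operator is now parameterized over OperatorDesc rather than bare Serializable, so anything holding an operator can reach a descriptor that is both serializable and cloneable without casting. Building the alias map under the new signature looks like this usage sketch, where someOperator stands in for any concrete operator instance:

    // Usage sketch; someOperator is a placeholder for a real operator.
    LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
      new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
    aliasToWork.put("a", someOperator);
    MapredLocalWork localWork = new MapredLocalWork(aliasToWork,
      new LinkedHashMap<String, FetchWork>());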
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Wed Aug 29 17:43:59 2012
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.ByteArrayOutputStream;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -39,7 +38,7 @@ import org.apache.hadoop.hive.ql.parse.S
  *
  */
 @Explain(displayName = "Map Reduce")
-public class MapredWork implements Serializable {
+public class MapredWork extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private String command;
   // map side work
@@ -49,7 +48,7 @@ public class MapredWork implements Seria
 
   private LinkedHashMap<String, PartitionDesc> pathToPartitionInfo;
 
-  private LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork;
+  private LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork;
 
   private LinkedHashMap<String, PartitionDesc> aliasToPartnInfo;
 
@@ -81,7 +80,7 @@ public class MapredWork implements Seria
 
   private String tmpHDFSFileURI;
 
-  private LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap;
+  private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap;
 
   private QBJoinTree joinTree;
 
@@ -100,7 +99,7 @@ public class MapredWork implements Seria
       final String command,
       final LinkedHashMap<String, ArrayList<String>> pathToAliases,
       final LinkedHashMap<String, PartitionDesc> pathToPartitionInfo,
-      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork,
+      final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork,
       final TableDesc keyDesc, List<TableDesc> tagToValueDesc,
       final Operator<?> reducer, final Integer numReduceTasks,
       final MapredLocalWork mapLocalWork,
@@ -167,12 +166,12 @@ public class MapredWork implements Seria
   }
 
   @Explain(displayName = "Alias -> Map Operator Tree")
-  public LinkedHashMap<String, Operator<? extends Serializable>> getAliasToWork() {
+  public LinkedHashMap<String, Operator<? extends OperatorDesc>> getAliasToWork() {
     return aliasToWork;
   }
 
   public void setAliasToWork(
-      final LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork) {
+      final LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork) {
     this.aliasToWork = aliasToWork;
   }
 
@@ -433,12 +432,13 @@ public class MapredWork implements Seria
     this.joinTree = joinTree;
   }
 
-  public LinkedHashMap<Operator<? extends Serializable>, OpParseContext> getOpParseCtxMap() {
+  public
+    LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> getOpParseCtxMap() {
     return opParseCtxMap;
   }
 
   public void setOpParseCtxMap(
-      LinkedHashMap<Operator<? extends Serializable>, OpParseContext> opParseCtxMap) {
+    LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtxMap) {
     this.opParseCtxMap = opParseCtxMap;
   }
 

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java?rev=1378659&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/OperatorDesc.java Wed Aug 29 17:43:59 2012
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+public interface OperatorDesc extends Serializable, Cloneable {
+  public Object clone() throws CloneNotSupportedException;
+}
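
The interface redeclares clone() so that copying becomes part of the public contract: Object.clone() is protected, and Cloneable alone does not expose it. AbstractOperatorDesc, which every descriptor in this commit now extends, appears in another part of r1378659 rather than in this [4/4] mail; presumably it implements OperatorDesc and supplies a default clone(), roughly along these lines:

    // Assumed shape of AbstractOperatorDesc (defined elsewhere in this
    // commit): descriptors that can be copied override clone(); the
    // rest inherit a refusal.
    public abstract class AbstractOperatorDesc implements OperatorDesc {
      @Override
      public Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException("clone not supported");
      }
    }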

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -88,7 +87,7 @@ public final class PlanUtils {
     try {
       return new MapredWork("", new LinkedHashMap<String, ArrayList<String>>(),
         new LinkedHashMap<String, PartitionDesc>(),
-        new LinkedHashMap<String, Operator<? extends Serializable>>(),
+        new LinkedHashMap<String, Operator<? extends OperatorDesc>>(),
         new TableDesc(), new ArrayList<TableDesc>(), null, Integer.valueOf(1),
         null, Hive.get().getConf().getBoolVar(
           HiveConf.ConfVars.HIVE_COMBINE_INPUT_FORMAT_SUPPORTS_SPLITTABLE));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Wed Aug 29 17:43:59 2012
@@ -18,15 +18,16 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 
+
 /**
  * ReduceSinkDesc.
  *
  */
 @Explain(displayName = "Reduce Output Operator")
-public class ReduceSinkDesc implements Serializable {
+public class ReduceSinkDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   /**
    * Key columns are passed to reducer in the "key".
@@ -91,6 +92,29 @@ public class ReduceSinkDesc implements S
     this.distinctColumnIndices = distinctColumnIndices;
   }
 
+  @Override
+  public Object clone() {
+    ReduceSinkDesc desc = new ReduceSinkDesc();
+    desc.setKeyCols((ArrayList<ExprNodeDesc>) getKeyCols().clone());
+    desc.setValueCols((ArrayList<ExprNodeDesc>) getValueCols().clone());
+    desc.setOutputKeyColumnNames((ArrayList<String>) getOutputKeyColumnNames().clone());
+    List<List<Integer>> distinctColumnIndicesClone = new ArrayList<List<Integer>>();
+    for (List<Integer> distinctColumnIndex : getDistinctColumnIndices()) {
+      List<Integer> tmp = new ArrayList<Integer>();
+      tmp.addAll(distinctColumnIndex);
+      distinctColumnIndicesClone.add(tmp);
+    }
+    desc.setDistinctColumnIndices(distinctColumnIndicesClone);
+    desc.setOutputValueColumnNames((ArrayList<String>) getOutputValueColumnNames().clone());
+    desc.setNumDistributionKeys(getNumDistributionKeys());
+    desc.setTag(getTag());
+    desc.setNumReducers(getNumReducers());
+    desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
+    desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
+    desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
+    return desc;
+  }
+
   public java.util.ArrayList<java.lang.String> getOutputKeyColumnNames() {
     return outputKeyColumnNames;
   }
@@ -186,7 +210,7 @@ public class ReduceSinkDesc implements S
 
   /**
    * Returns the sort order of the key columns.
-   * 
+   *
    * @return null, which means ascending order for all key columns, or a String
    *         of the same length as key columns, that consists of only "+"
    *         (ascending order) and "-" (descending order).
@@ -196,7 +220,7 @@ public class ReduceSinkDesc implements S
     return keySerializeInfo.getProperties().getProperty(
         org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER);
   }
-  
+
   public void setOrder(String orderStr) {
     keySerializeInfo.getProperties().setProperty(
         org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER,
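
The casts around the ArrayList.clone() calls in the new clone() above are unchecked but sound: ArrayList.clone() returns a shallow copy with the same element type, so the cloned descriptor owns its lists while sharing the ExprNodeDesc elements themselves. A small typed helper would express the same copy without the casts; this is a hypothetical alternative, not what the commit does:

    // Hypothetical: the copy constructor is type-safe and also shallow.
    private static <T> ArrayList<T> shallowCopy(ArrayList<T> src) {
      return new ArrayList<T>(src);
    }
    // e.g. desc.setKeyCols(shallowCopy(getKeyCols()));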

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java Wed Aug 29 17:43:59 2012
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
-
 import org.apache.hadoop.hive.ql.exec.RecordReader;
 import org.apache.hadoop.hive.ql.exec.RecordWriter;
 
@@ -28,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.Re
  *
  */
 @Explain(displayName = "Transform Operator")
-public class ScriptDesc implements Serializable {
+public class ScriptDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
   private String scriptCmd;
   // Describe how to deserialize data back from user script

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java Wed Aug 29 17:43:59 2012
@@ -18,17 +18,18 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
+import java.util.ArrayList;
+
 
 /**
  * SelectDesc.
  *
  */
 @Explain(displayName = "Select Operator")
-public class SelectDesc implements Serializable {
+public class SelectDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
-  private java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
-  private java.util.ArrayList<java.lang.String> outputColumnNames;
+  private ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList;
+  private ArrayList<java.lang.String> outputColumnNames;
   private boolean selectStar;
   private boolean selStarNoCompute;
 
@@ -40,45 +41,55 @@ public class SelectDesc implements Seria
   }
 
   public SelectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
-      final java.util.ArrayList<java.lang.String> outputColumnNames) {
+    final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
+    final ArrayList<java.lang.String> outputColumnNames) {
     this(colList, outputColumnNames, false);
   }
 
   public SelectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
-      java.util.ArrayList<java.lang.String> outputColumnNames,
-      final boolean selectStar) {
+    final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
+    ArrayList<java.lang.String> outputColumnNames,
+    final boolean selectStar) {
     this.colList = colList;
     this.selectStar = selectStar;
     this.outputColumnNames = outputColumnNames;
   }
 
   public SelectDesc(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
-      final boolean selectStar, final boolean selStarNoCompute) {
+    final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList,
+    final boolean selectStar, final boolean selStarNoCompute) {
     this.colList = colList;
     this.selectStar = selectStar;
     this.selStarNoCompute = selStarNoCompute;
   }
 
+  @Override
+  public Object clone() {
+    SelectDesc ret = new SelectDesc();
+    ret.setColList((ArrayList<ExprNodeDesc>)getColList().clone());
+    ret.setOutputColumnNames((ArrayList<String>)getOutputColumnNames().clone());
+    ret.setSelectStar(selectStar);
+    ret.setSelStarNoCompute(selStarNoCompute);
+    return ret;
+  }
+
   @Explain(displayName = "expressions")
-  public java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> getColList() {
+  public ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> getColList() {
     return colList;
   }
 
   public void setColList(
-      final java.util.ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList) {
+    final ArrayList<org.apache.hadoop.hive.ql.plan.ExprNodeDesc> colList) {
     this.colList = colList;
   }
 
   @Explain(displayName = "outputColumnNames")
-  public java.util.ArrayList<java.lang.String> getOutputColumnNames() {
+  public ArrayList<java.lang.String> getOutputColumnNames() {
     return outputColumnNames;
   }
 
   public void setOutputColumnNames(
-      java.util.ArrayList<java.lang.String> outputColumnNames) {
+    ArrayList<java.lang.String> outputColumnNames) {
     this.outputColumnNames = outputColumnNames;
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Wed Aug 29 17:43:59 2012
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.ql.metadat
  * things will be added here as table scan is invoked as part of local work.
  **/
 @Explain(displayName = "TableScan")
-public class TableScanDesc implements Serializable {
+public class TableScanDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
   private String alias;
@@ -71,6 +71,12 @@ public class TableScanDesc implements Se
     this.virtualCols = vcs;
   }
 
+  @Override
+  public Object clone() {
+    List<VirtualColumn> vcs = new ArrayList<VirtualColumn>(getVirtualCols());
+    return new TableScanDesc(getAlias(), vcs);
+  }
+
   @Explain(displayName = "alias")
   public String getAlias() {
     return alias;
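
TableScanDesc takes a third route to clone(): instead of copying field by field, it rebuilds itself through its own constructor, duplicating only the mutable virtualCols list (the alias String is immutable and safe to share). Usage is the same as for the other descriptors; scanDesc below stands in for an existing instance:

    // Usage sketch; scanDesc is a placeholder for an existing TableScanDesc.
    TableScanDesc copy = (TableScanDesc) scanDesc.clone();
    assert copy.getAlias().equals(scanDesc.getAlias());
    assert copy.getVirtualCols() != scanDesc.getVirtualCols(); // distinct lists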

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java Wed Aug 29 17:43:59 2012
@@ -18,18 +18,16 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
-
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 
 /**
  * All member variables should have setters and getters of the form get<member
  * name> and set<member name> or else they won't be recreated properly at run
  * time.
- * 
+ *
  */
 @Explain(displayName = "UDTF Operator")
-public class UDTFDesc implements Serializable {
+public class UDTFDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
   private GenericUDTF genericUDTF;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java Wed Aug 29 17:43:59 2012
@@ -18,14 +18,13 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
 
 /**
  * unionDesc is an empty class currently. However, union has more than one input
  * (as compared with forward), and therefore, we need a separate class.
  **/
 @Explain(displayName = "Union")
-public class UnionDesc implements Serializable {
+public class UnionDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
   private transient int numInputs;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Wed Aug 29 17:43:59 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.ppd;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Context for Expression Walker for determining predicate pushdown candidates
@@ -62,7 +62,7 @@ public class ExprWalkerInfo implements N
 
   protected static final Log LOG = LogFactory.getLog(OpProcFactory.class
       .getName());
-  private Operator<? extends Serializable> op = null;
+  private Operator<? extends OperatorDesc> op = null;
   private RowResolver toRR = null;
 
   /**
@@ -105,7 +105,7 @@ public class ExprWalkerInfo implements N
     newToOldExprMap = new HashMap<ExprNodeDesc, ExprNodeDesc>();
   }
 
-  public ExprWalkerInfo(Operator<? extends Serializable> op,
+  public ExprWalkerInfo(Operator<? extends OperatorDesc> op,
       final RowResolver toRR) {
     this.op = op;
     this.toRR = toRR;
@@ -119,7 +119,7 @@ public class ExprWalkerInfo implements N
   /**
    * @return the op of this expression.
    */
-  public Operator<? extends Serializable> getOp() {
+  public Operator<? extends OperatorDesc> getOp() {
     return op;
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java Wed Aug 29 17:43:59 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.ppd;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -44,6 +43,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Expression factory for predicate pushdown processing. Each processor
@@ -70,7 +70,7 @@ public final class ExprWalkerProcFactory
       ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
       ExprNodeColumnDesc colref = (ExprNodeColumnDesc) nd;
       RowResolver toRR = ctx.getToRR();
-      Operator<? extends Serializable> op = ctx.getOp();
+      Operator<? extends OperatorDesc> op = ctx.getOp();
       String[] colAlias = toRR.reverseLookup(colref.getColumn());
 
       boolean isCandidate = true;
@@ -230,8 +230,8 @@ public final class ExprWalkerProcFactory
   }
 
   public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
-      Operator<? extends Serializable> op, ExprNodeDesc pred)
-      throws SemanticException {
+    Operator<? extends OperatorDesc> op, ExprNodeDesc pred)
+    throws SemanticException {
     List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>();
     preds.add(pred);
     return extractPushdownPreds(opContext, op, preds);
@@ -249,11 +249,11 @@ public final class ExprWalkerProcFactory
    * @throws SemanticException
    */
   public static ExprWalkerInfo extractPushdownPreds(OpWalkerInfo opContext,
-      Operator<? extends Serializable> op, List<ExprNodeDesc> preds)
-      throws SemanticException {
+    Operator<? extends OperatorDesc> op, List<ExprNodeDesc> preds)
+    throws SemanticException {
     // Create the walker, the rules dispatcher and the context.
     ExprWalkerInfo exprContext = new ExprWalkerInfo(op, opContext
-        .getRowResolver(op));
+      .getRowResolver(op));
 
     // create a walker which walks the tree in a DFS manner while maintaining
     // the operator stack. The dispatcher

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Wed Aug 29 17:43:59 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.ppd;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -58,6 +57,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -132,10 +132,10 @@ public final class OpProcFactory {
       // SELECT(*) because that's the way that the DAG was constructed. We
       // only want to get the predicates from the SELECT(*).
       ExprWalkerInfo childPreds = owi
-      .getPrunedPreds((Operator<? extends Serializable>) nd.getChildren()
+      .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
       .get(0));
 
-      owi.putPrunedPreds((Operator<? extends Serializable>) nd, childPreds);
+      owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, childPreds);
       return null;
     }
 
@@ -173,7 +173,8 @@ public final class OpProcFactory {
       LOG.info("Processing for " + nd.getName() + "("
           + ((Operator) nd).getIdentifier() + ")");
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
-      Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+      Operator<? extends OperatorDesc> op =
+        (Operator<? extends OperatorDesc>) nd;
       ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate();
       ExprWalkerInfo ewi = new ExprWalkerInfo();
       // Don't push a sampling predicate since createFilter() always creates filter
@@ -186,7 +187,7 @@ public final class OpProcFactory {
           /* predicate is not deterministic */
           if (op.getChildren() != null && op.getChildren().size() == 1) {
             createFilter(op, owi
-                .getPrunedPreds((Operator<? extends Serializable>) (op
+                .getPrunedPreds((Operator<? extends OperatorDesc>) (op
                 .getChildren().get(0))), owi);
           }
           return null;
@@ -199,7 +200,7 @@ public final class OpProcFactory {
           }
         }
         logExpr(nd, ewi);
-        owi.putPrunedPreds((Operator<? extends Serializable>) nd, ewi);
+        owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi);
       }
       // merge it with children predicates
       boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, ewi, null, false);
@@ -233,7 +234,7 @@ public final class OpProcFactory {
       boolean hasUnpushedPredicates =
           mergeWithChildrenPred(nd, owi, null, null, false);
       ExprWalkerInfo prunePreds =
-          owi.getPrunedPreds((Operator<? extends Serializable>) nd);
+          owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
       if (prunePreds != null) {
         Set<String> toRemove = new HashSet<String>();
         // we don't push down any expressions that refer to aliases that can't
@@ -294,7 +295,7 @@ public final class OpProcFactory {
     private void applyFilterTransitivity(JoinOperator nd, OpWalkerInfo owi)
         throws SemanticException {
       ExprWalkerInfo prunePreds =
-          owi.getPrunedPreds((Operator<? extends Serializable>) nd);
+          owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
       if (prunePreds != null) {
         // We want to use the row resolvers of the parents of the join op
         // because the rowresolver refers to the output columns of an operator
@@ -302,7 +303,7 @@ public final class OpProcFactory {
         // operator.
         Map<String, RowResolver> aliasToRR =
             new HashMap<String, RowResolver>();
-        for (Operator<? extends Serializable> o : (nd).getParentOperators()) {
+        for (Operator<? extends OperatorDesc> o : (nd).getParentOperators()) {
           for (String alias : owi.getRowResolver(o).getTableNames()){
             aliasToRR.put(alias, owi.getRowResolver(o));
           }
@@ -386,7 +387,7 @@ public final class OpProcFactory {
 
         for (Entry<String, List<ExprNodeDesc>> aliasToFilters
             : newFilters.entrySet()){
-          owi.getPrunedPreds((Operator<? extends Serializable>) nd)
+          owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd)
             .addPushDowns(aliasToFilters.getKey(), aliasToFilters.getValue());
         }
       }
@@ -513,8 +514,9 @@ public final class OpProcFactory {
       if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
           HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
         if (hasUnpushedPredicates) {
-          Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
-          Operator<? extends Serializable> childOperator = op.getChildOperators().get(0);
+          Operator<? extends OperatorDesc> op =
+            (Operator<? extends OperatorDesc>) nd;
+          Operator<? extends OperatorDesc> childOperator = op.getChildOperators().get(0);
           if(childOperator.getParentOperators().size()==1) {
             owi.getCandidateFilterOps().clear();
           }
@@ -587,9 +589,10 @@ public final class OpProcFactory {
         // no-op for leafs
         return hasUnpushedPredicates;
       }
-      Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+      Operator<? extends OperatorDesc> op =
+        (Operator<? extends OperatorDesc>) nd;
       ExprWalkerInfo childPreds = owi
-          .getPrunedPreds((Operator<? extends Serializable>) nd.getChildren()
+          .getPrunedPreds((Operator<? extends OperatorDesc>) nd.getChildren()
           .get(0));
       if (childPreds == null) {
         return hasUnpushedPredicates;
@@ -614,7 +617,7 @@ public final class OpProcFactory {
           hasUnpushedPredicates = true;
         }
       }
-      owi.putPrunedPreds((Operator<? extends Serializable>) nd, ewi);
+      owi.putPrunedPreds((Operator<? extends OperatorDesc>) nd, ewi);
       return hasUnpushedPredicates;
     }
 
@@ -624,9 +627,9 @@ public final class OpProcFactory {
       if (nd.getChildren() == null) {
         return null;
       }
-      Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+      Operator<? extends OperatorDesc> op = (Operator<? extends OperatorDesc>)nd;
       ExprWalkerInfo ewi = new ExprWalkerInfo();
-      for (Operator<? extends Serializable> child : op.getChildOperators()) {
+      for (Operator<? extends OperatorDesc> child : op.getChildOperators()) {
         ExprWalkerInfo childPreds = owi.getPrunedPreds(child);
         if (childPreds == null) {
           continue;
@@ -698,15 +701,15 @@ public final class OpProcFactory {
     }
 
     // add new filter op
-    List<Operator<? extends Serializable>> originalChilren = op
+    List<Operator<? extends OperatorDesc>> originalChilren = op
         .getChildOperators();
     op.setChildOperators(null);
     Operator<FilterDesc> output = OperatorFactory.getAndMakeChild(
         new FilterDesc(condn, false), new RowSchema(inputRR.getColumnInfos()),
         op);
     output.setChildOperators(originalChilren);
-    for (Operator<? extends Serializable> ch : originalChilren) {
-      List<Operator<? extends Serializable>> parentOperators = ch
+    for (Operator<? extends OperatorDesc> ch : originalChilren) {
+      List<Operator<? extends OperatorDesc>> parentOperators = ch
           .getParentOperators();
       int pos = parentOperators.indexOf(op);
       assert pos != -1;
@@ -720,13 +723,13 @@ public final class OpProcFactory {
         HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
       // remove the candidate filter ops
       for (FilterOperator fop : owi.getCandidateFilterOps()) {
-        List<Operator<? extends Serializable>> children = fop.getChildOperators();
-        List<Operator<? extends Serializable>> parents = fop.getParentOperators();
-        for (Operator<? extends Serializable> parent : parents) {
+        List<Operator<? extends OperatorDesc>> children = fop.getChildOperators();
+        List<Operator<? extends OperatorDesc>> parents = fop.getParentOperators();
+        for (Operator<? extends OperatorDesc> parent : parents) {
           parent.getChildOperators().addAll(children);
           parent.removeChild(fop);
         }
-        for (Operator<? extends Serializable> child : children) {
+        for (Operator<? extends OperatorDesc> child : children) {
           child.getParentOperators().addAll(parents);
           child.removeParent(fop);
         }
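
Most of the churn in OpProcFactory is this same mechanical substitution, and the repeated (Operator<? extends OperatorDesc>) nd casts stay unchecked because Node carries no type parameter. A tiny helper could centralize the cast to a single @SuppressWarnings site; hypothetical, not part of the commit:

    // Hypothetical: one suppression instead of a dozen inline casts.
    @SuppressWarnings("unchecked")
    private static Operator<? extends OperatorDesc> asOperator(Node nd) {
      return (Operator<? extends OperatorDesc>) nd;
    }
    // e.g. owi.putPrunedPreds(asOperator(nd), ewi);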

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java Wed Aug 29 17:43:59 2012
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.ppd;
 
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -30,6 +29,7 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 
 /**
  * Context class for operator walker of predicate pushdown.
@@ -39,23 +39,24 @@ public class OpWalkerInfo implements Nod
    * Operator to Pushdown Predicates Map. This keeps track of the final pushdown
    * predicates for each operator as you walk the Op Graph from child to parent
    */
-  private final HashMap<Operator<? extends Serializable>, ExprWalkerInfo> opToPushdownPredMap;
-  private final Map<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
+  private final HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>
+    opToPushdownPredMap;
+  private final Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
   private final ParseContext pGraphContext;
   private final List<FilterOperator> candidateFilterOps;
 
   public OpWalkerInfo(ParseContext pGraphContext) {
     this.pGraphContext = pGraphContext;
     opToParseCtxMap = pGraphContext.getOpParseCtx();
-    opToPushdownPredMap = new HashMap<Operator<? extends Serializable>, ExprWalkerInfo>();
+    opToPushdownPredMap = new HashMap<Operator<? extends OperatorDesc>, ExprWalkerInfo>();
     candidateFilterOps = new ArrayList<FilterOperator>();
   }
 
-  public ExprWalkerInfo getPrunedPreds(Operator<? extends Serializable> op) {
+  public ExprWalkerInfo getPrunedPreds(Operator<? extends OperatorDesc> op) {
     return opToPushdownPredMap.get(op);
   }
 
-  public ExprWalkerInfo putPrunedPreds(Operator<? extends Serializable> op,
+  public ExprWalkerInfo putPrunedPreds(Operator<? extends OperatorDesc> op,
       ExprWalkerInfo value) {
     return opToPushdownPredMap.put(op, value);
   }
@@ -64,7 +65,7 @@ public class OpWalkerInfo implements Nod
     return opToParseCtxMap.get(op).getRowResolver();
   }
 
-  public OpParseContext put(Operator<? extends Serializable> key,
+  public OpParseContext put(Operator<? extends OperatorDesc> key,
       OpParseContext value) {
     return opToParseCtxMap.put(key, value);
   }

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Wed Aug 29 17:43:59 2012
@@ -18,8 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import java.io.Serializable;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
 
 import junit.framework.TestCase;
 
@@ -32,6 +35,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.ScriptDesc;
@@ -334,7 +338,8 @@ public class TestOperators extends TestC
       CollectOperator cdop2 = (CollectOperator) OperatorFactory
           .get(CollectDesc.class);
       cdop2.setConf(cd);
-      LinkedHashMap<String, Operator<? extends Serializable>> aliasToWork = new LinkedHashMap<String, Operator<? extends Serializable>>();
+      LinkedHashMap<String, Operator<? extends OperatorDesc>> aliasToWork =
+        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
       aliasToWork.put("a", cdop1);
       aliasToWork.put("b", cdop2);
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java?rev=1378659&r1=1378658&r2=1378659&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestPlan.java Wed Aug 29 17:43:59 2012
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import java.io.ByteArrayOutputStream;
-import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 
@@ -30,6 +29,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -69,8 +69,8 @@ public class TestPlan extends TestCase {
       LinkedHashMap<String, PartitionDesc> pt = new LinkedHashMap<String, PartitionDesc>();
       pt.put("/tmp/testfolder", partDesc);
 
-      LinkedHashMap<String, Operator<? extends Serializable>> ao =
-        new LinkedHashMap<String, Operator<? extends Serializable>>();
+      LinkedHashMap<String, Operator<? extends OperatorDesc>> ao =
+        new LinkedHashMap<String, Operator<? extends OperatorDesc>>();
       ao.put("a", op);
 
       MapredWork mrwork = new MapredWork();