Posted to commits@hive.apache.org by he...@apache.org on 2012/10/01 05:52:11 UTC

svn commit: r1392193 [1/2] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/optimiz...

Author: heyongqiang
Date: Mon Oct  1 03:52:10 2012
New Revision: 1392193

URL: http://svn.apache.org/viewvc?rev=1392193&view=rev
Log:
revert r1392105 due to bylaw requirement mentioned by Carl Steinbach

Removed:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/BaseReduceSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CorrelationCompositeOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CorrelationLocalSimulativeReduceSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/CorrelationReducerDispatchOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/CorrelationOptimizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/CorrelationOptimizerUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BaseReduceSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CorrelationCompositeDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CorrelationLocalSimulativeReduceSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CorrelationReducerDispatchDesc.java
    hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer1.q
    hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer2.q
    hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer3.q
    hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer4.q
    hive/trunk/ql/src/test/queries/clientpositive/correlationoptimizer5.q
    hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer1.q.out
    hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer2.q.out
    hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer3.q.out
    hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer4.q.out
    hive/trunk/ql/src/test/results/clientpositive/correlationoptimizer5.q.out
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/conf/hive-default.xml.template
    hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
    hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
    hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Mon Oct  1 03:52:10 2012
@@ -495,7 +495,6 @@ public class HiveConf extends Configurat
     HIVEOPTBUCKETMAPJOIN("hive.optimize.bucketmapjoin", false), // optimize bucket map join
     HIVEOPTSORTMERGEBUCKETMAPJOIN("hive.optimize.bucketmapjoin.sortedmerge", false), // try to use sorted merge bucket map join
     HIVEOPTREDUCEDEDUPLICATION("hive.optimize.reducededuplication", true),
-    HIVEOPTCORRELATION("hive.optimize.correlation", false), // exploit intra-query correlations
 
     // optimize skewed join by changing the query plan at compile time
     HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME("hive.optimize.skewjoin.compiletime", false),

Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Mon Oct  1 03:52:10 2012
@@ -928,12 +928,6 @@
 </property>
 
 <property>
-  <name>hive.optimize.correlation</name>
-  <value>false</value>
-  <description>exploit intra-query correlations.</description>
-</property>
-
-<property>
   <name>hive.exec.dynamic.partition</name>
   <value>true</value>
   <description>Whether or not to allow dynamic partitions in DML/DDL.</description>

Modified: hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java (original)
+++ hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/OperatorType.java Mon Oct  1 03:52:10 2012
@@ -28,10 +28,7 @@ public enum OperatorType implements org.
   LATERALVIEWJOIN(14),
   LATERALVIEWFORWARD(15),
   HASHTABLESINK(16),
-  HASHTABLEDUMMY(17),
-  CORRELATIONCOMPOSITE(18),
-  CORRELATIONLOCALSIMULATIVEREDUCESINK(19),
-  CORRELATIONREDUCERDISPATCH(20);
+  HASHTABLEDUMMY(17);
 
   private final int value;
 
@@ -88,12 +85,6 @@ public enum OperatorType implements org.
         return HASHTABLESINK;
       case 17:
         return HASHTABLEDUMMY;
-      case 18:
-        return CORRELATIONCOMPOSITE;
-      case 19:
-        return CORRELATIONLOCALSIMULATIVEREDUCESINK;
-      case 20:
-        return CORRELATIONREDUCERDISPATCH;
       default:
         return null;
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecReducer.java Mon Oct  1 03:52:10 2012
@@ -25,7 +25,6 @@ import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Iterator;
-import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -62,7 +61,6 @@ public class ExecReducer extends MapRedu
   private Reporter rp;
   private boolean abort = false;
   private boolean isTagged = false;
-  private boolean isOperationPathTagged = false;
   private long cntr = 0;
   private long nextCntr = 1;
 
@@ -118,7 +116,6 @@ public class ExecReducer extends MapRedu
     reducer.setParentOperators(null); // clear out any parents as reducer is the
     // root
     isTagged = gWork.getNeedsTagging();
-    isOperationPathTagged = gWork.getNeedsOperationPathTagging();
     try {
       keyTableDesc = gWork.getKeyDesc();
       inputKeyDeserializer = (SerDe) ReflectionUtils.newInstance(keyTableDesc
@@ -167,9 +164,8 @@ public class ExecReducer extends MapRedu
 
   private BytesWritable groupKey;
 
-  List<Object> row = new ArrayList<Object>(4);
+  ArrayList<Object> row = new ArrayList<Object>(3);
   ByteWritable tag = new ByteWritable();
-  ByteWritable operationPathTags = new ByteWritable();
 
   public void reduce(Object key, Iterator values, OutputCollector output,
       Reporter reporter) throws IOException {
@@ -192,14 +188,6 @@ public class ExecReducer extends MapRedu
         keyWritable.setSize(size);
       }
 
-      operationPathTags.set((byte)0);
-      if (isOperationPathTagged) {
-        // remove the operation plan tag
-        int size = keyWritable.getSize() - 1;
-        operationPathTags.set(keyWritable.get()[size]);
-        keyWritable.setSize(size);
-      }
-
       if (!keyWritable.equals(groupKey)) {
        // If an operator wants to do some work at the beginning of a group
         if (groupKey == null) { // the first group
@@ -224,7 +212,6 @@ public class ExecReducer extends MapRedu
         l4j.trace("Start Group");
         reducer.startGroup();
         reducer.setGroupKeyObject(keyObject);
-        reducer.setBytesWritableGroupKey(groupKey);
       }
       // System.err.print(keyObject.toString());
       while (values.hasNext()) {
@@ -247,7 +234,6 @@ public class ExecReducer extends MapRedu
         row.add(valueObject[tag.get()]);
         // The tag is not used any more, we should remove it.
         row.add(tag);
-        row.add(operationPathTags);
         if (isLogInfoEnabled) {
           cntr++;
           if (cntr == nextCntr) {
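
For context on the tagging path these hunks restore: when a plan needs
tagging, ExecReducer treats the last byte of each incoming key as the tag
and strips it before comparing group keys. A minimal sketch of that strip
step follows; the class and method names are illustrative, not the actual
operator code.

    import org.apache.hadoop.io.BytesWritable;

    // Illustrative sketch: the map side appended one tag byte to the
    // serialized key, so the reducer reads and removes it before grouping.
    public final class TagStripSketch {
      static byte stripTrailingTag(BytesWritable keyWritable) {
        int size = keyWritable.getSize() - 1;  // last byte is the tag
        byte tag = keyWritable.get()[size];
        keyWritable.setSize(size);             // shrink key to its real length
        return tag;
      }
    }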

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java Mon Oct  1 03:52:10 2012
@@ -144,13 +144,6 @@ public class GroupByOperator extends Ope
   private long maxMemory;
   private float memoryThreshold;
 
-  private boolean forcedForward;  // only used by CorrelationReducerDispatchOperator to keep
-                                  // this GroupByOperator in step with other
-                                  // GroupByOperators and JoinOperators.
-                                  // If true and newKeys differs from currentKeys,
-                                  // the data associated with currentKeys is
-                                  // forwarded; otherwise, nothing happens.
-
   /**
    * This is used to store the position and field names for variable length
    * fields.
@@ -392,7 +385,6 @@ public class GroupByOperator extends Ope
     memoryMXBean = ManagementFactory.getMemoryMXBean();
     maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
     memoryThreshold = this.getConf().getMemoryThreshold();
-    forcedForward = false;
     initializeChildren(hconf);
   }
 
@@ -801,10 +793,6 @@ public class GroupByOperator extends Ope
     }
   }
 
-  public void setForcedForward(boolean forcedForward) {
-    this.forcedForward = forcedForward;
-  }
-
   // Non-hash aggregation
   private void processAggr(Object row, ObjectInspector rowInspector,
       KeyWrapper newKeys) throws HiveException {
@@ -818,16 +806,11 @@ public class GroupByOperator extends Ope
         newKeys.equals(currentKeys) : false;
 
     // Forward the current keys if needed for sort-based aggregation
-    if (currentKeys != null && (!keysAreEqual || forcedForward)) {
+    if (currentKeys != null && !keysAreEqual) {
       forward(currentKeys.getKeyArray(), aggregations);
       countAfterReport = 0;
     }
 
-    if (forcedForward) {
-      currentKeys = null;
-      return;
-    }
-
     // Need to update the keys?
     if (currentKeys == null || !keysAreEqual) {
       if (currentKeys == null) {
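
For context on the processAggr hunk above: in sort-based (non-hash)
aggregation, rows arrive ordered by key, so the operator forwards the
accumulated aggregations whenever the incoming key differs from the current
one. A minimal sketch of that boundary logic, with a simple count standing
in for the real aggregation buffers (names are illustrative, not the Hive
operator's):

    // Illustrative sketch of sort-based aggregation: emit a group's result
    // as soon as the group key changes.
    public final class SortedGroupBySketch {
      private Object currentKey;
      private long count;  // stand-in for real aggregation buffers

      void processAggr(Object newKey) {
        boolean keysAreEqual = currentKey != null && currentKey.equals(newKey);
        if (currentKey != null && !keysAreEqual) {
          System.out.println(currentKey + " -> " + count);  // previous group done
        }
        if (currentKey == null || !keysAreEqual) {
          currentKey = newKey;  // start a new group
          count = 0;
        }
        count++;  // fold this row into the running aggregation
      }
    }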

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java Mon Oct  1 03:52:10 2012
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.ql.plan.ap
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.mapred.OutputCollector;
@@ -1416,52 +1415,4 @@ public abstract class Operator<T extends
   public void setUseBucketizedHiveInputFormat(boolean useBucketizedHiveInputFormat) {
     this.useBucketizedHiveInputFormat = useBucketizedHiveInputFormat;
   }
-
-  // bytesWritableGroupKey is only used when a query plan is optimized by CorrelationOptimizer.
-  // CorrelationLocalSimulativeReduceSinkOperator will use this variable to determine when it needs to start or end the group
-  // for its child operator.
-  protected BytesWritable bytesWritableGroupKey;
-
-  public void setBytesWritableGroupKey(BytesWritable groupKey) {
-    if (bytesWritableGroupKey == null) {
-      bytesWritableGroupKey = new BytesWritable();
-    }
-    bytesWritableGroupKey.set(groupKey.get(), 0, groupKey.getSize());
-  }
-
-  public BytesWritable getBytesWritableGroupKey() {
-    return bytesWritableGroupKey;
-  }
-
-  // The number of current row
-  protected long rowNumber;
-
-  public void initializeRowNumber() {
-    this.rowNumber = 0L;
-    LOG.info("Operator " + id + " " + getName() + " row number initialized to 0");
-    if (childOperators == null) {
-      return;
-    }
-    LOG.info("Initializing row numbers of children of " + id + " " + getName());
-    for (int i = 0; i < childOperatorsArray.length; i++) {
-      childOperatorsArray[i].initializeRowNumber();
-    }
-  }
-
-  public void setRowNumber(long rowNumber) throws HiveException {
-    this.rowNumber = rowNumber;
-    if (childOperators == null) {
-      return;
-    }
-    for (int i = 0; i < childOperatorsArray.length; i++) {
-      assert rowNumber >= childOperatorsArray[i].getRowNumber();
-      if (rowNumber != childOperatorsArray[i].getRowNumber()) {
-        childOperatorsArray[i].setRowNumber(rowNumber);
-      }
-    }
-  }
-
-  public long getRowNumber() {
-    return rowNumber;
-  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorFactory.java Mon Oct  1 03:52:10 2012
@@ -22,9 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.hive.ql.plan.CollectDesc;
-import org.apache.hadoop.hive.ql.plan.CorrelationCompositeDesc;
-import org.apache.hadoop.hive.ql.plan.CorrelationLocalSimulativeReduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.CorrelationReducerDispatchDesc;
 import org.apache.hadoop.hive.ql.plan.ExtractDesc;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
 import org.apache.hadoop.hive.ql.plan.FilterDesc;
@@ -94,12 +91,6 @@ public final class OperatorFactory {
         HashTableDummyOperator.class));
     opvec.add(new OpTuple<HashTableSinkDesc>(HashTableSinkDesc.class,
         HashTableSinkOperator.class));
-    opvec.add(new OpTuple<CorrelationCompositeDesc>(CorrelationCompositeDesc.class,
-            CorrelationCompositeOperator.class));
-    opvec.add(new OpTuple<CorrelationReducerDispatchDesc>(CorrelationReducerDispatchDesc.class,
-        CorrelationReducerDispatchOperator.class));
-    opvec.add(new OpTuple<CorrelationLocalSimulativeReduceSinkDesc>(CorrelationLocalSimulativeReduceSinkDesc.class,
-        CorrelationLocalSimulativeReduceSinkOperator.class));
   }
 
   public static <T extends OperatorDesc> Operator<T> get(Class<T> opClass) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java Mon Oct  1 03:52:10 2012
@@ -21,50 +21,179 @@ package org.apache.hadoop.hive.ql.exec;
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.HiveKey;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.Serializer;
+import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
 
 /**
  * Reduce Sink Operator sends output to the reduce stage.
  **/
-public class ReduceSinkOperator extends BaseReduceSinkOperator<ReduceSinkDesc>
+public class ReduceSinkOperator extends TerminalOperator<ReduceSinkDesc>
     implements Serializable {
 
   private static final long serialVersionUID = 1L;
 
-  private final List<Integer> operationPathTags = new ArrayList<Integer>(); // operation path tags
-  private final byte[] operationPathTagsByte = new byte[1];
+  /**
+   * The evaluators for the key columns. Key columns decide the sort order on
+   * the reducer side. Key columns are passed to the reducer in the "key".
+   */
+  protected transient ExprNodeEvaluator[] keyEval;
+  /**
+   * The evaluators for the value columns. Value columns are passed to reducer
+   * in the "value".
+   */
+  protected transient ExprNodeEvaluator[] valueEval;
+  /**
+   * The evaluators for the partition columns (CLUSTER BY or DISTRIBUTE BY in
+   * Hive language). Partition columns decide the reducer that the current row
+   * goes to. Partition columns are not passed to reducer.
+   */
+  protected transient ExprNodeEvaluator[] partitionEval;
+
+  // TODO: we use MetadataTypedColumnsetSerDe for now, till DynamicSerDe is
+  // ready
+  transient Serializer keySerializer;
+  transient boolean keyIsText;
+  transient Serializer valueSerializer;
+  transient int tag;
+  transient byte[] tagByte = new byte[1];
+  transient protected int numDistributionKeys;
+  transient protected int numDistinctExprs;
+
+  @Override
+  protected void initializeOp(Configuration hconf) throws HiveException {
+
+    try {
+      keyEval = new ExprNodeEvaluator[conf.getKeyCols().size()];
+      int i = 0;
+      for (ExprNodeDesc e : conf.getKeyCols()) {
+        keyEval[i++] = ExprNodeEvaluatorFactory.get(e);
+      }
+
+      numDistributionKeys = conf.getNumDistributionKeys();
+      distinctColIndices = conf.getDistinctColumnIndices();
+      numDistinctExprs = distinctColIndices.size();
+
+      valueEval = new ExprNodeEvaluator[conf.getValueCols().size()];
+      i = 0;
+      for (ExprNodeDesc e : conf.getValueCols()) {
+        valueEval[i++] = ExprNodeEvaluatorFactory.get(e);
+      }
+
+      partitionEval = new ExprNodeEvaluator[conf.getPartitionCols().size()];
+      i = 0;
+      for (ExprNodeDesc e : conf.getPartitionCols()) {
+        partitionEval[i++] = ExprNodeEvaluatorFactory.get(e);
+      }
 
-  public void setOperationPathTags(List<Integer> operationPathTags) {
-    this.operationPathTags.addAll(operationPathTags);
-    int operationPathTagsInt = 0;
-    int tmp = 1;
-    for (Integer operationPathTag: operationPathTags) {
-      operationPathTagsInt += tmp << operationPathTag.intValue();
+      tag = conf.getTag();
+      tagByte[0] = (byte) tag;
+      LOG.info("Using tag = " + tag);
+
+      TableDesc keyTableDesc = conf.getKeySerializeInfo();
+      keySerializer = (Serializer) keyTableDesc.getDeserializerClass()
+          .newInstance();
+      keySerializer.initialize(null, keyTableDesc.getProperties());
+      keyIsText = keySerializer.getSerializedClass().equals(Text.class);
+
+      TableDesc valueTableDesc = conf.getValueSerializeInfo();
+      valueSerializer = (Serializer) valueTableDesc.getDeserializerClass()
+          .newInstance();
+      valueSerializer.initialize(null, valueTableDesc.getProperties());
+
+      firstRow = true;
+      initializeChildren(hconf);
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new RuntimeException(e);
     }
-    operationPathTagsByte[0] = (byte) operationPathTagsInt;
   }
 
-  public List<Integer> getOperationPathTags() {
-    return this.operationPathTags;
+  transient InspectableObject tempInspectableObject = new InspectableObject();
+  transient HiveKey keyWritable = new HiveKey();
+  transient Writable value;
+
+  transient StructObjectInspector keyObjectInspector;
+  transient StructObjectInspector valueObjectInspector;
+  transient ObjectInspector[] partitionObjectInspectors;
+
+  transient Object[][] cachedKeys;
+  transient Object[] cachedValues;
+  transient List<List<Integer>> distinctColIndices;
+
+  boolean firstRow;
+
+  transient Random random;
+
+  /**
+   * Initializes array of ExprNodeEvaluator. Adds Union field for distinct
+   * column indices for group by.
+   * Puts the return values into a StructObjectInspector with output column
+   * names.
+   *
+   * If distinctColIndices is empty, the object inspector is the same as
+   * {@link Operator#initEvaluatorsAndReturnStruct(ExprNodeEvaluator[], List, ObjectInspector)}
+   */
+  protected static StructObjectInspector initEvaluatorsAndReturnStruct(
+      ExprNodeEvaluator[] evals, List<List<Integer>> distinctColIndices,
+      List<String> outputColNames,
+      int length, ObjectInspector rowInspector)
+      throws HiveException {
+    int inspectorLen = evals.length > length ? length + 1 : evals.length;
+    List<ObjectInspector> sois = new ArrayList<ObjectInspector>(inspectorLen);
+
+    // keys
+    ObjectInspector[] fieldObjectInspectors = initEvaluators(evals, 0, length, rowInspector);
+    sois.addAll(Arrays.asList(fieldObjectInspectors));
+
+    if (evals.length > length) {
+      // union keys
+      List<ObjectInspector> uois = new ArrayList<ObjectInspector>();
+      for (List<Integer> distinctCols : distinctColIndices) {
+        List<String> names = new ArrayList<String>();
+        List<ObjectInspector> eois = new ArrayList<ObjectInspector>();
+        int numExprs = 0;
+        for (int i : distinctCols) {
+          names.add(HiveConf.getColumnInternalName(numExprs));
+          eois.add(evals[i].initialize(rowInspector));
+          numExprs++;
+        }
+        uois.add(ObjectInspectorFactory.getStandardStructObjectInspector(names, eois));
+      }
+      UnionObjectInspector uoi =
+        ObjectInspectorFactory.getStandardUnionObjectInspector(uois);
+      sois.add(uoi);
+    }
+    return ObjectInspectorFactory.getStandardStructObjectInspector(outputColNames, sois );
   }
 
   @Override
   public void processOp(Object row, int tag) throws HiveException {
     try {
       ObjectInspector rowInspector = inputObjInspectors[tag];
-      if (isFirstRow) {
-        isFirstRow = false;
+      if (firstRow) {
+        firstRow = false;
         keyObjectInspector = initEvaluatorsAndReturnStruct(keyEval,
             distinctColIndices,
             conf.getOutputKeyColumnNames(), numDistributionKeys, rowInspector);
@@ -138,18 +267,9 @@ public class ReduceSinkOperator extends 
             keyWritable.set(key.getBytes(), 0, key.getLength());
           } else {
             int keyLength = key.getLength();
-            if (!this.getConf().getNeedsOperationPathTagging()) {
-              keyWritable.setSize(keyLength + 1);
-            } else {
-              keyWritable.setSize(keyLength + 2);
-            }
+            keyWritable.setSize(keyLength + 1);
             System.arraycopy(key.getBytes(), 0, keyWritable.get(), 0, keyLength);
-            if (!this.getConf().getNeedsOperationPathTagging()) {
-              keyWritable.get()[keyLength] = tagByte[0];
-            } else {
-              keyWritable.get()[keyLength] = operationPathTagsByte[0];
-              keyWritable.get()[keyLength + 1] = tagByte[0];
-            }
+            keyWritable.get()[keyLength] = tagByte[0];
           }
         } else {
           // Must be BytesWritable
@@ -159,18 +279,9 @@ public class ReduceSinkOperator extends 
             keyWritable.set(key.getBytes(), 0, key.getLength());
           } else {
             int keyLength = key.getLength();
-            if (!this.getConf().getNeedsOperationPathTagging()) {
-              keyWritable.setSize(keyLength + 1);
-            } else {
-              keyWritable.setSize(keyLength + 2);
-            }
+            keyWritable.setSize(keyLength + 1);
             System.arraycopy(key.getBytes(), 0, keyWritable.get(), 0, keyLength);
-            if (!this.getConf().getNeedsOperationPathTagging()) {
-              keyWritable.get()[keyLength] = tagByte[0];
-            } else {
-              keyWritable.get()[keyLength] = operationPathTagsByte[0];
-              keyWritable.get()[keyLength + 1] = tagByte[0];
-            }
+            keyWritable.get()[keyLength] = tagByte[0];
           }
         }
         keyWritable.setHashCode(keyHashCode);
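
This is the map-side half of the tagging scheme sketched after the
ExecReducer section above: ReduceSinkOperator serializes the key and then
appends the single tag byte that the reducer strips off. A minimal sketch
of the append step (illustrative names; the real operator writes into a
reused HiveKey buffer):

    // Illustrative sketch: copy the serialized key and add one trailing
    // tag byte identifying which input the row came from.
    public final class TagAppendSketch {
      static byte[] appendTag(byte[] key, int keyLength, byte tag) {
        byte[] out = new byte[keyLength + 1];
        System.arraycopy(key, 0, out, 0, keyLength);
        out[keyLength] = tag;  // the reducer strips this byte off again
        return out;
      }
    }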

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java Mon Oct  1 03:52:10 2012
@@ -80,9 +80,6 @@ public class TableScanOperator extends O
     if (conf != null && conf.isGatherStats()) {
       gatherStats(row);
     }
-    if (conf != null && conf.isForwardRowNumber()) {
-      setRowNumber(rowNumber+1);
-    }
     forward(row, inputObjInspectors[tag]);
   }
 
@@ -172,12 +169,6 @@ public class TableScanOperator extends O
     if (conf == null) {
       return;
     }
-
-    LOG.info(this.getName() + " forward row number " + conf.isForwardRowNumber());
-    if(conf.isForwardRowNumber()){
-      initializeRowNumber();
-    }
-
     if (!conf.isGatherStats()) {
       return;
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Mon Oct  1 03:52:10 2012
@@ -116,11 +116,6 @@ public final class GenMapRedUtils {
     }
     if (reducer.getClass() == JoinOperator.class) {
       plan.setNeedsTagging(true);
-      plan.setNeedsOperationPathTagging(false);
-    }
-    if (op.getConf().getNeedsOperationPathTagging()) {
-      plan.setNeedsTagging(true);
-      plan.setNeedsOperationPathTagging(true);
     }
 
     assert currTopOp != null;
@@ -187,7 +182,6 @@ public final class GenMapRedUtils {
         opTaskMap.put(reducer, currTask);
         if (reducer.getClass() == JoinOperator.class) {
           plan.setNeedsTagging(true);
-          plan.setNeedsOperationPathTagging(false);
         }
         ReduceSinkDesc desc = (ReduceSinkDesc) op.getConf();
         plan.setNumReduceTasks(desc.getNumReducers());
@@ -322,7 +316,6 @@ public final class GenMapRedUtils {
 
     if (reducer.getClass() == JoinOperator.class) {
       plan.setNeedsTagging(true);
-      plan.setNeedsOperationPathTagging(false);
     }
 
     initUnionPlan(opProcCtx, unionTask, false);
@@ -1073,7 +1066,6 @@ public final class GenMapRedUtils {
       // dependent on the redTask
       if (reducer.getClass() == JoinOperator.class) {
         cplan.setNeedsTagging(true);
-        cplan.setNeedsOperationPathTagging(false);
       }
     }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java Mon Oct  1 03:52:10 2012
@@ -46,18 +46,6 @@ public class Optimizer {
    */
   public void initialize(HiveConf hiveConf) {
     transformations = new ArrayList<Transform>();
-    // Add correlation optimizer for first phase query plan tree analysis.
-    // The first phase will record original opColumnExprMap, opParseCtx, opRowResolver,
-    // since these may be changed by other optimizers (e.g. entries in opColumnExprMap may be deleted).
-    // If hive.groupby.skewindata is on, CorrelationOptimizer will not be applied.
-    // TODO: Make correlation optimizer 1 phase.
-    CorrelationOptimizer correlationOptimizer = new CorrelationOptimizer();
-    if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCORRELATION) &&
-        !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
-        !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
-      // TODO: make CorrelationOptimizer compatible with SkewJoinOptimizer
-      transformations.add(correlationOptimizer);
-    }
     // Add the transformation that computes the lineage information.
     transformations.add(new Generator());
     if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCP)) {
@@ -95,13 +83,6 @@ public class Optimizer {
     if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVELIMITOPTENABLE)) {
       transformations.add(new GlobalLimitOptimizer());
     }
-    // The second phase of correlation optimizer used for correlation detection and query plan tree transformation.
-    // The second phase should be the last optimizer added into transformations.
-    if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCORRELATION) &&
-        !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
-        !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
-      transformations.add(correlationOptimizer);
-    }
     transformations.add(new SimpleFetchOptimizer());  // must be called last
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Mon Oct  1 03:52:10 2012
@@ -34,7 +34,6 @@ import org.apache.hadoop.hive.ql.exec.Gr
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.LineageInfo;
@@ -87,11 +86,6 @@ public class ParseContext {
   private Map<GroupByOperator, Set<String>> groupOpToInputTables;
   private Map<String, PrunedPartitionList> prunedPartitions;
 
-  //a map from non-map-side group by pattern (RS-GBY) to map-side group by pattern (GBY-RS-GBY)
-  Map<ReduceSinkOperator, GroupByOperator> groupbyNonMapSide2MapSide;
-  //a map from map-side group by pattern (GBY-RS-GBY) to non-map-side group by pattern (RS-GBY)
-  Map<GroupByOperator, ReduceSinkOperator> groupbyMapSide2NonMapSide;
-
   /**
    * The lineage information.
    */
@@ -175,9 +169,7 @@ public class ParseContext {
       GlobalLimitCtx globalLimitCtx,
       HashMap<String, SplitSample> nameToSplitSample,
       HashSet<ReadEntity> semanticInputs, List<Task<? extends Serializable>> rootTasks,
-      Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner,
-      Map<ReduceSinkOperator, GroupByOperator> groupbyNonMapSide2MapSide,
-      Map<GroupByOperator, ReduceSinkOperator> groupbyMapSide2NonMapSide) {
+      Map<TableScanOperator, ExprNodeDesc> opToSkewedPruner) {
     this.conf = conf;
     this.qb = qb;
     this.ast = ast;
@@ -204,8 +196,6 @@ public class ParseContext {
     this.semanticInputs = semanticInputs;
     this.rootTasks = rootTasks;
     this.opToSkewedPruner = opToSkewedPruner;
-    this.groupbyNonMapSide2MapSide = groupbyNonMapSide2MapSide;
-    this.groupbyMapSide2NonMapSide = groupbyMapSide2NonMapSide;
   }
 
   /**
@@ -548,7 +538,7 @@ public class ParseContext {
   }
 
   public void replaceRootTask(Task<? extends Serializable> rootTask,
-      List<? extends Task<? extends Serializable>> tasks) {
+                              List<? extends Task<? extends Serializable>> tasks) {
     this.rootTasks.remove(rootTask);
     this.rootTasks.addAll(tasks);
   }
@@ -586,11 +576,4 @@ public class ParseContext {
     this.opToSkewedPruner = opToSkewedPruner;
   }
 
-  public Map<ReduceSinkOperator, GroupByOperator> getGroupbyNonMapSide2MapSide() {
-    return groupbyNonMapSide2MapSide;
-  }
-
-  public Map<GroupByOperator, ReduceSinkOperator> getGroupbyMapSide2NonMapSide() {
-    return groupbyMapSide2NonMapSide;
-  }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Oct  1 03:52:10 2012
@@ -188,7 +188,7 @@ public class SemanticAnalyzer extends Ba
   private List<LoadTableDesc> loadTableWork;
   private List<LoadFileDesc> loadFileWork;
   private Map<JoinOperator, QBJoinTree> joinContext;
-  private HashMap<TableScanOperator, Table> topToTable;
+  private final HashMap<TableScanOperator, Table> topToTable;
   private QB qb;
   private ASTNode ast;
   private int destTableId;
@@ -210,11 +210,6 @@ public class SemanticAnalyzer extends Ba
   private final UnparseTranslator unparseTranslator;
   private final GlobalLimitCtx globalLimitCtx = new GlobalLimitCtx();
 
-  // a map from non-map-side group by pattern (RS-GBY) to map-side group by pattern (GBY-RS-GBY)
-  Map<ReduceSinkOperator, GroupByOperator> groupbyNonMapSide2MapSide;
-  // a map from map-side group by pattern (GBY-RS-GBY) to non-map-side group by pattern (RS-GBY)
-  Map<GroupByOperator, ReduceSinkOperator> groupbyMapSide2NonMapSide;
-
   //prefix for column names auto generated by hive
   private final String autogenColAliasPrfxLbl;
   private final boolean autogenColAliasPrfxIncludeFuncName;
@@ -253,8 +248,6 @@ public class SemanticAnalyzer extends Ba
                          HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
     queryProperties = new QueryProperties();
     opToSkewedPruner = new HashMap<TableScanOperator, ExprNodeDesc>();
-    groupbyNonMapSide2MapSide = new HashMap<ReduceSinkOperator, GroupByOperator>();
-    groupbyMapSide2NonMapSide = new HashMap<GroupByOperator, ReduceSinkOperator>();
   }
 
   @Override
@@ -273,9 +266,6 @@ public class SemanticAnalyzer extends Ba
     opParseCtx.clear();
     groupOpToInputTables.clear();
     prunedPartitions.clear();
-    topToTable.clear();
-    groupbyNonMapSide2MapSide.clear();
-    groupbyMapSide2NonMapSide.clear();
   }
 
   public void init(ParseContext pctx) {
@@ -283,7 +273,6 @@ public class SemanticAnalyzer extends Ba
     opToPartList = pctx.getOpToPartList();
     opToSamplePruner = pctx.getOpToSamplePruner();
     topOps = pctx.getTopOps();
-    topToTable = pctx.getTopToTable();
     topSelOps = pctx.getTopSelOps();
     opParseCtx = pctx.getOpParseCtx();
     loadTableWork = pctx.getLoadTableWork();
@@ -299,8 +288,6 @@ public class SemanticAnalyzer extends Ba
     prunedPartitions = pctx.getPrunedPartitions();
     fetchTask = pctx.getFetchTask();
     setLineageInfo(pctx.getLineageInfo());
-    groupbyNonMapSide2MapSide = pctx.getGroupbyNonMapSide2MapSide();
-    groupbyMapSide2NonMapSide = pctx.getGroupbyMapSide2NonMapSide();
   }
 
   public ParseContext getParseContext() {
@@ -308,8 +295,7 @@ public class SemanticAnalyzer extends Ba
         topSelOps, opParseCtx, joinContext, topToTable, loadTableWork,
         loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
         listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
-        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner,
-        groupbyNonMapSide2MapSide, groupbyMapSide2NonMapSide);
+        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner);
   }
 
   @SuppressWarnings("nls")
@@ -2921,7 +2907,7 @@ public class SemanticAnalyzer extends Ba
         colExprMap);
 
     List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
-        reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames, colExprMap);
+        reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames);
 
     ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
     HashMap<String, ASTNode> aggregationTrees = parseInfo
@@ -2929,7 +2915,7 @@ public class SemanticAnalyzer extends Ba
 
     if (!mapAggrDone) {
       getReduceValuesForReduceSinkNoMapAgg(parseInfo, dest, reduceSinkInputRowResolver,
-          reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
+          reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues);
     } else {
       // Put partial aggregation results in reduceValues
       int inputField = reduceKeys.size();
@@ -2938,16 +2924,14 @@ public class SemanticAnalyzer extends Ba
 
         TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(
             inputField).getType();
-        ExprNodeDesc expr = new ExprNodeColumnDesc(type,
-            getColumnInternalName(inputField), "", false);
-        reduceValues.add(expr);
+        reduceValues.add(new ExprNodeColumnDesc(type,
+            getColumnInternalName(inputField), "", false));
         inputField++;
         outputValueColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
         String field = Utilities.ReduceField.VALUE.toString() + "."
             + getColumnInternalName(reduceValues.size() - 1);
-        ColumnInfo colInfo = new ColumnInfo(field, type, null, false);
-        reduceSinkOutputRowResolver.putExpression(entry.getValue(), colInfo);
-        colExprMap.put(colInfo.getInternalName(), expr);
+        reduceSinkOutputRowResolver.putExpression(entry.getValue(),
+            new ColumnInfo(field, type, null, false));
       }
     }
 
@@ -2992,8 +2976,7 @@ public class SemanticAnalyzer extends Ba
 
   private List<List<Integer>> getDistinctColIndicesForReduceSink(QBParseInfo parseInfo, String dest,
       ArrayList<ExprNodeDesc> reduceKeys, RowResolver reduceSinkInputRowResolver,
-      RowResolver reduceSinkOutputRowResolver, List<String> outputKeyColumnNames,
-      Map<String, ExprNodeDesc> colExprMap)
+      RowResolver reduceSinkOutputRowResolver, List<String> outputKeyColumnNames)
       throws SemanticException {
 
     List<List<Integer>> distinctColIndices = new ArrayList<List<Integer>>();
@@ -3032,7 +3015,6 @@ public class SemanticAnalyzer extends Ba
           ColumnInfo colInfo = new ColumnInfo(field, expr.getTypeInfo(), null, false);
           reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
           numExprs++;
-          colExprMap.put(colInfo.getInternalName(), expr);
         }
         distinctColIndices.add(distinctIndices);
       }
@@ -3043,8 +3025,7 @@ public class SemanticAnalyzer extends Ba
 
   private void getReduceValuesForReduceSinkNoMapAgg(QBParseInfo parseInfo, String dest,
       RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
-      List<String> outputValueColumnNames, ArrayList<ExprNodeDesc> reduceValues,
-      Map<String, ExprNodeDesc> colExprMap)
+      List<String> outputValueColumnNames, ArrayList<ExprNodeDesc> reduceValues)
       throws SemanticException {
     HashMap<String, ASTNode> aggregationTrees = parseInfo
         .getAggregationExprsForClause(dest);
@@ -3056,16 +3037,15 @@ public class SemanticAnalyzer extends Ba
       for (int i = 1; i < value.getChildCount(); i++) {
         ASTNode parameter = (ASTNode) value.getChild(i);
         if (reduceSinkOutputRowResolver.getExpression(parameter) == null) {
-          ExprNodeDesc expr = genExprNodeDesc(parameter, reduceSinkInputRowResolver);
-          reduceValues.add(expr);
+          reduceValues.add(genExprNodeDesc(parameter,
+              reduceSinkInputRowResolver));
           outputValueColumnNames
               .add(getColumnInternalName(reduceValues.size() - 1));
           String field = Utilities.ReduceField.VALUE.toString() + "."
               + getColumnInternalName(reduceValues.size() - 1);
-          ColumnInfo colInfo = new ColumnInfo(field,
-              reduceValues.get(reduceValues.size() - 1).getTypeInfo(), null, false);
-          reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
-          colExprMap.put(colInfo.getInternalName(), expr);
+          reduceSinkOutputRowResolver.putExpression(parameter, new ColumnInfo(field,
+              reduceValues.get(reduceValues.size() - 1).getTypeInfo(), null,
+              false));
         }
       }
     }
@@ -3096,7 +3076,7 @@ public class SemanticAnalyzer extends Ba
         colExprMap);
 
     List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
-        reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames, colExprMap);
+        reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames);
 
     ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
 
@@ -3105,7 +3085,7 @@ public class SemanticAnalyzer extends Ba
     for (String destination : dests) {
 
       getReduceValuesForReduceSinkNoMapAgg(parseInfo, destination, reduceSinkInputRowResolver,
-          reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
+          reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues);
 
       // Need to pass all of the columns used in the where clauses as reduce values
       ASTNode whereClause = parseInfo.getWhrForClause(destination);
@@ -3115,18 +3095,15 @@ public class SemanticAnalyzer extends Ba
         for (int i = 0; i < columnExprs.size(); i++) {
           ASTNode parameter = columnExprs.get(i);
           if (reduceSinkOutputRowResolver.getExpression(parameter) == null) {
-            ExprNodeDesc expr = genExprNodeDesc(parameter,
-                reduceSinkInputRowResolver);
-            reduceValues.add(expr);
+            reduceValues.add(genExprNodeDesc(parameter,
+                reduceSinkInputRowResolver));
             outputValueColumnNames
                 .add(getColumnInternalName(reduceValues.size() - 1));
             String field = Utilities.ReduceField.VALUE.toString() + "."
                 + getColumnInternalName(reduceValues.size() - 1);
-            ColumnInfo colInfo = new ColumnInfo(field,
+            reduceSinkOutputRowResolver.putExpression(parameter, new ColumnInfo(field,
                 reduceValues.get(reduceValues.size() - 1).getTypeInfo(), null,
-                false);
-            reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
-            colExprMap.put(colInfo.getInternalName(), expr);
+                false));
           }
         }
       }
@@ -3223,16 +3200,13 @@ public class SemanticAnalyzer extends Ba
       ASTNode t = entry.getValue();
       TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
           .getType();
-      ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
-          "", false);
-      reduceValues.add(inputExpr);
+      reduceValues.add(new ExprNodeColumnDesc(typeInfo, field, "", false));
       inputField++;
       String col = getColumnInternalName(reduceValues.size() - 1);
       outputColumnNames.add(col);
-      ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.VALUE.toString()
-          + "." + col, typeInfo, "", false);
-      reduceSinkOutputRowResolver2.putExpression(t, colInfo);
-      colExprMap.put(colInfo.getInternalName(), inputExpr);
+      reduceSinkOutputRowResolver2.putExpression(t, new ColumnInfo(
+          Utilities.ReduceField.VALUE.toString() + "." + col, typeInfo, "",
+          false));
     }
 
     ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
@@ -5944,7 +5918,6 @@ public class SemanticAnalyzer extends Ba
               reduceValues.size() - 1).getTypeInfo(), "", false);
           reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
           outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
-          colExprMap.put(colInfo.getInternalName(), grpByExprNode);
         }
       }
 
@@ -5971,7 +5944,6 @@ public class SemanticAnalyzer extends Ba
             reduceSinkOutputRowResolver.putExpression(paraExpr, colInfo);
             outputColumnNames
                 .add(getColumnInternalName(reduceValues.size() - 1));
-            colExprMap.put(colInfo.getInternalName(), paraExprNode);
           }
         }
       }
@@ -6210,23 +6182,7 @@ public class SemanticAnalyzer extends Ba
                 curr = insertSelectAllPlanForGroupBy(curr);
                 if (conf.getBoolVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) {
                   if (!conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
-                    Operator rsopInNonMapSidePattern = null;
-                    Operator mapSideGroupBy = null;
-                    if (conf.getBoolVar(HiveConf.ConfVars.HIVEOPTCORRELATION)) {
-                      Operator nonMapSidePattern = genGroupByPlan1MR(dest, qb, curr);
-                      rsopInNonMapSidePattern = (Operator) nonMapSidePattern
-                          .getParentOperators().get(0);
-                      curr.getChildOperators().remove(rsopInNonMapSidePattern);
-                    }
                     curr = genGroupByPlanMapAggr1MR(dest, qb, curr);
-                    mapSideGroupBy = (Operator) ((Operator) curr.getParentOperators().get(0))
-                        .getParentOperators().get(0);
-                    if (conf.getBoolVar(HiveConf.ConfVars.HIVEOPTCORRELATION)) {
-                      groupbyNonMapSide2MapSide.put((ReduceSinkOperator) rsopInNonMapSidePattern,
-                          (GroupByOperator) mapSideGroupBy);
-                      groupbyMapSide2NonMapSide.put((GroupByOperator) mapSideGroupBy,
-                          (ReduceSinkOperator) rsopInNonMapSidePattern);
-                    }
                   } else {
                     curr = genGroupByPlanMapAggr2MR(dest, qb, curr);
                   }
@@ -7639,8 +7595,7 @@ public class SemanticAnalyzer extends Ba
         opToPartList, topOps, topSelOps, opParseCtx, joinContext, topToTable,
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
         listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
-        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner,
-        groupbyNonMapSide2MapSide, groupbyMapSide2NonMapSide);
+        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToSkewedPruner);
 
     Optimizer optm = new Optimizer();
     optm.setPctx(pCtx);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java Mon Oct  1 03:52:10 2012
@@ -71,7 +71,6 @@ public class MapredWork extends Abstract
   private Long minSplitSizePerRack;
 
   private boolean needsTagging;
-  private boolean needsOperationPathTagging;
   private boolean hadoopSupportsSplittable;
 
   private MapredLocalWork mapLocalWork;
@@ -340,16 +339,6 @@ public class MapredWork extends Abstract
     this.needsTagging = needsTagging;
   }
 
-  //TODO: enable the annotation shown below
-  // @Explain(displayName = "Needs Operation Paths Tagging", normalExplain = false)
-  public boolean getNeedsOperationPathTagging() {
-    return needsOperationPathTagging;
-  }
-
-  public void setNeedsOperationPathTagging(boolean needsOperationPathTagging) {
-    this.needsOperationPathTagging = needsOperationPathTagging;
-  }
-
   public boolean getHadoopSupportsSplittable() {
     return hadoopSupportsSplittable;
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java Mon Oct  1 03:52:10 2012
@@ -27,44 +27,58 @@ import java.util.List;
  *
  */
 @Explain(displayName = "Reduce Output Operator")
-public class ReduceSinkDesc extends BaseReduceSinkDesc {
+public class ReduceSinkDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
+  /**
+   * Key columns are passed to reducer in the "key".
+   */
+  private java.util.ArrayList<ExprNodeDesc> keyCols;
+  private java.util.ArrayList<java.lang.String> outputKeyColumnNames;
+  private List<List<Integer>> distinctColumnIndices;
+  /**
+   * Value columns are passed to reducer in the "value".
+   */
+  private java.util.ArrayList<ExprNodeDesc> valueCols;
+  private java.util.ArrayList<java.lang.String> outputValueColumnNames;
+  /**
+   * Describe how to serialize the key.
+   */
+  private TableDesc keySerializeInfo;
+  /**
+   * Describe how to serialize the value.
+   */
+  private TableDesc valueSerializeInfo;
+
+  /**
+   * The tag for this reducesink descriptor.
+   */
+  private int tag;
+
+  /**
+   * Number of distribution keys.
+   */
+  private int numDistributionKeys;
+
+  /**
+   * The partition columns (CLUSTER BY or DISTRIBUTE BY in Hive language).
+   * Partition columns decide the reducer that the current row goes to.
+   * Partition columns are not passed to reducer.
+   */
+  private java.util.ArrayList<ExprNodeDesc> partitionCols;
 
-  private boolean needsOperationPathTagging;
-
-  public boolean getNeedsOperationPathTagging() {
-    return needsOperationPathTagging;
-  }
-
-  public void setNeedsOperationPathTagging(boolean isOperationPathTagged) {
-    this.needsOperationPathTagging = isOperationPathTagged;
-  }
+  private int numReducers;
 
   public ReduceSinkDesc() {
   }
 
-  public ReduceSinkDesc(ArrayList<ExprNodeDesc> keyCols,
-	      int numDistributionKeys,
-	      ArrayList<ExprNodeDesc> valueCols,
-	      ArrayList<String> outputKeyColumnNames,
-	      List<List<Integer>> distinctColumnIndices,
-	      ArrayList<String> outputValueColumnNames, int tag,
-	      ArrayList<ExprNodeDesc> partitionCols, int numReducers,
-	      final TableDesc keySerializeInfo, final TableDesc valueSerializeInfo) {
-    this(keyCols, numDistributionKeys, valueCols,
-      outputKeyColumnNames, distinctColumnIndices, outputValueColumnNames, tag,
-      partitionCols, numReducers, keySerializeInfo, valueSerializeInfo, false);
-  }
-
-  public ReduceSinkDesc(ArrayList<ExprNodeDesc> keyCols,
+  public ReduceSinkDesc(java.util.ArrayList<ExprNodeDesc> keyCols,
       int numDistributionKeys,
-      ArrayList<ExprNodeDesc> valueCols,
-      ArrayList<String> outputKeyColumnNames,
+      java.util.ArrayList<ExprNodeDesc> valueCols,
+      java.util.ArrayList<java.lang.String> outputKeyColumnNames,
       List<List<Integer>> distinctColumnIndices,
-      ArrayList<String> outputValueColumnNames, int tag,
-      ArrayList<ExprNodeDesc> partitionCols, int numReducers,
-      final TableDesc keySerializeInfo, final TableDesc valueSerializeInfo,
-      boolean needsOperationPathTagging) {
+      java.util.ArrayList<java.lang.String> outputValueColumnNames, int tag,
+      java.util.ArrayList<ExprNodeDesc> partitionCols, int numReducers,
+      final TableDesc keySerializeInfo, final TableDesc valueSerializeInfo) {
     this.keyCols = keyCols;
     this.numDistributionKeys = numDistributionKeys;
     this.valueCols = valueCols;
@@ -76,7 +90,6 @@ public class ReduceSinkDesc extends Base
     this.keySerializeInfo = keySerializeInfo;
     this.valueSerializeInfo = valueSerializeInfo;
     this.distinctColumnIndices = distinctColumnIndices;
-    this.needsOperationPathTagging = needsOperationPathTagging;
   }
 
   @Override
@@ -99,7 +112,127 @@ public class ReduceSinkDesc extends Base
     desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
     desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
     desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
-    desc.setNeedsOperationPathTagging(needsOperationPathTagging);
     return desc;
   }
+
+  public java.util.ArrayList<java.lang.String> getOutputKeyColumnNames() {
+    return outputKeyColumnNames;
+  }
+
+  public void setOutputKeyColumnNames(
+      java.util.ArrayList<java.lang.String> outputKeyColumnNames) {
+    this.outputKeyColumnNames = outputKeyColumnNames;
+  }
+
+  public java.util.ArrayList<java.lang.String> getOutputValueColumnNames() {
+    return outputValueColumnNames;
+  }
+
+  public void setOutputValueColumnNames(
+      java.util.ArrayList<java.lang.String> outputValueColumnNames) {
+    this.outputValueColumnNames = outputValueColumnNames;
+  }
+
+  @Explain(displayName = "key expressions")
+  public java.util.ArrayList<ExprNodeDesc> getKeyCols() {
+    return keyCols;
+  }
+
+  public void setKeyCols(final java.util.ArrayList<ExprNodeDesc> keyCols) {
+    this.keyCols = keyCols;
+  }
+
+  public int getNumDistributionKeys() {
+    return this.numDistributionKeys;
+  }
+
+  public void setNumDistributionKeys(int numKeys) {
+    this.numDistributionKeys = numKeys;
+  }
+
+  @Explain(displayName = "value expressions")
+  public java.util.ArrayList<ExprNodeDesc> getValueCols() {
+    return valueCols;
+  }
+
+  public void setValueCols(final java.util.ArrayList<ExprNodeDesc> valueCols) {
+    this.valueCols = valueCols;
+  }
+
+  @Explain(displayName = "Map-reduce partition columns")
+  public java.util.ArrayList<ExprNodeDesc> getPartitionCols() {
+    return partitionCols;
+  }
+
+  public void setPartitionCols(
+      final java.util.ArrayList<ExprNodeDesc> partitionCols) {
+    this.partitionCols = partitionCols;
+  }
+
+  @Explain(displayName = "tag")
+  public int getTag() {
+    return tag;
+  }
+
+  public void setTag(int tag) {
+    this.tag = tag;
+  }
+
+  /**
+   * Returns the number of reducers for the map-reduce job. -1 means that the
+   * number of reducers is decided at runtime. This lets Hive estimate the
+   * number of reducers from the map-reduce input data size, which is only
+   * known right before the map-reduce job starts.
+   */
+  public int getNumReducers() {
+    return numReducers;
+  }
+
+  public void setNumReducers(int numReducers) {
+    this.numReducers = numReducers;
+  }
+
+  public TableDesc getKeySerializeInfo() {
+    return keySerializeInfo;
+  }
+
+  public void setKeySerializeInfo(TableDesc keySerializeInfo) {
+    this.keySerializeInfo = keySerializeInfo;
+  }
+
+  public TableDesc getValueSerializeInfo() {
+    return valueSerializeInfo;
+  }
+
+  public void setValueSerializeInfo(TableDesc valueSerializeInfo) {
+    this.valueSerializeInfo = valueSerializeInfo;
+  }
+
+  /**
+   * Returns the sort order of the key columns.
+   *
+   * @return null, which means ascending order for all key columns, or a String
+   *         with one character per key column, consisting only of "+"
+   *         (ascending order) and "-" (descending order).
+   */
+  @Explain(displayName = "sort order")
+  public String getOrder() {
+    return keySerializeInfo.getProperties().getProperty(
+        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER);
+  }
+
+  public void setOrder(String orderStr) {
+    keySerializeInfo.getProperties().setProperty(
+        org.apache.hadoop.hive.serde.Constants.SERIALIZATION_SORT_ORDER,
+        orderStr);
+  }
+
+  public List<List<Integer>> getDistinctColumnIndices() {
+    return distinctColumnIndices;
+  }
+
+  public void setDistinctColumnIndices(
+      List<List<Integer>> distinctColumnIndices) {
+    this.distinctColumnIndices = distinctColumnIndices;
+  }
 }
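
(Side note on the restored ReduceSinkDesc, since the full bean is easy to lose
in the diff: partition columns route each row to a reducer, so rows with equal
partition-column values land on the same reducer, and getOrder()/setOrder()
store one "+" or "-" per key column in the key TableDesc's properties. Below is
a minimal construction sketch using only the accessors restored above; the
column name, table alias, TypeInfo, and empty key TableDesc are illustrative
placeholders, not taken from this commit.)

    import java.util.ArrayList;
    import java.util.Properties;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
    import org.apache.hadoop.hive.ql.plan.TableDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    // Build the key expression through the bean setters, the same way the
    // serialized plan XML below populates ExprNodeColumnDesc.
    ExprNodeColumnDesc key = new ExprNodeColumnDesc();
    key.setColumn("key");
    key.setTabAlias("src");
    key.setTypeInfo(TypeInfoFactory.stringTypeInfo);

    ArrayList<ExprNodeDesc> keyCols = new ArrayList<ExprNodeDesc>();
    keyCols.add(key);

    ReduceSinkDesc desc = new ReduceSinkDesc();
    desc.setKeyCols(keyCols);
    desc.setNumDistributionKeys(1);   // all key columns take part in distribution
    desc.setPartitionCols(keyCols);   // rows with the same "key" go to one reducer
    desc.setNumReducers(-1);          // -1: let Hive pick the count at runtime
    desc.setTag(-1);                  // single parent feeding the reducer, no tag

    // setOrder() writes SERIALIZATION_SORT_ORDER into the key TableDesc's
    // properties, so that TableDesc must exist before the call.
    TableDesc keyTd = new TableDesc();
    keyTd.setProperties(new Properties());
    desc.setKeySerializeInfo(keyTd);
    desc.setOrder("+");               // one key column, ascending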

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java Mon Oct  1 03:52:10 2012
@@ -50,8 +50,6 @@ public class TableScanDesc extends Abstr
   private boolean gatherStats;
   private boolean statsReliable;
 
-  private boolean forwardRowNumber = false;
-
   private ExprNodeDesc filterExpr;
 
   public static final String FILTER_EXPR_CONF_STR =
@@ -105,14 +103,6 @@ public class TableScanDesc extends Abstr
     return partColumns;
   }
 
-  public boolean isForwardRowNumber() {
-    return forwardRowNumber;
-  }
-
-  public void setForwardRowNumber(boolean forwardRowNumber) {
-    this.forwardRowNumber = forwardRowNumber;
-  }
-
   public void setGatherStats(boolean gatherStats) {
     this.gatherStats = gatherStats;
   }
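
(The TableScanDesc change is a pure property removal; what survives are the
usual boolean bean properties. A tiny sketch of setting the remaining flags,
assuming the standard setter for statsReliable, which this hunk does not show:)

    import org.apache.hadoop.hive.ql.plan.TableScanDesc;

    TableScanDesc tsDesc = new TableScanDesc();
    tsDesc.setGatherStats(true);      // collect stats while scanning the table
    tsDesc.setStatsReliable(false);   // treat gathered stats as best-effort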

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Mon Oct  1 03:52:10 2012
@@ -277,7 +277,6 @@ public class TestExecDriver extends Test
   private void populateMapRedPlan3(Table src, Table src2) throws SemanticException {
     mr.setNumReduceTasks(Integer.valueOf(5));
     mr.setNeedsTagging(true);
-    mr.setNeedsOperationPathTagging(false);
     ArrayList<String> outputColumns = new ArrayList<String>();
     for (int i = 0; i < 2; i++) {
       outputColumns.add("_col" + i);
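
(With the operation-path tagging gone, the test plan keeps only plain value
tagging: setNeedsTagging(true) makes reduce input rows carry a source tag so
ExecReducer can tell the join's two inputs apart. A sketch of the remaining
setup, mirroring the test code above; `mr` is the test's MapredWork, and
rsDesc1/rsDesc2 are hypothetical names for the two sinks' descriptors:)

    mr.setNumReduceTasks(Integer.valueOf(5));
    mr.setNeedsTagging(true);            // reducer rows carry a source tag
    ArrayList<String> outputColumns = new ArrayList<String>();
    for (int i = 0; i < 2; i++) {
      outputColumns.add("_col" + i);     // internal column names _col0, _col1
    }
    // Each source's ReduceSinkDesc gets a distinct tag, e.g.:
    // rsDesc1.setTag(0);  rsDesc2.setTag(1);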

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml Mon Oct  1 03:52:10 2012
@@ -351,24 +351,6 @@
                         </void> 
                        </object> 
                       </void> 
-                      <void method="put"> 
-                       <string>VALUE._col0</string> 
-                       <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>_col1</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string></string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                          <void property="typeName"> 
-                           <string>double</string> 
-                          </void> 
-                         </object> 
-                        </void> 
-                       </object> 
-                      </void> 
                      </object> 
                     </void> 
                     <void property="conf"> 
@@ -441,7 +423,21 @@
                       <void property="valueCols"> 
                        <object class="java.util.ArrayList"> 
                         <void method="add"> 
-                         <object idref="ExprNodeColumnDesc1"/> 
+                         <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                          <void property="column"> 
+                           <string>_col1</string> 
+                          </void> 
+                          <void property="tabAlias"> 
+                           <string></string> 
+                          </void> 
+                          <void property="typeInfo"> 
+                           <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                            <void property="typeName"> 
+                             <string>double</string> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
                         </void> 
                        </object> 
                       </void> 
@@ -538,7 +534,7 @@
                  <object class="java.util.HashMap"> 
                   <void method="put"> 
                    <string>_col0</string> 
-                   <object id="ExprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                   <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
                     <void property="column"> 
                      <string>key</string> 
                     </void> 
@@ -634,7 +630,7 @@
                   <void property="keys"> 
                    <object class="java.util.ArrayList"> 
                     <void method="add"> 
-                     <object idref="ExprNodeColumnDesc2"/> 
+                     <object idref="ExprNodeColumnDesc1"/> 
                     </void> 
                    </object> 
                   </void> 
@@ -1298,7 +1294,7 @@
            <object class="java.util.HashMap"> 
             <void method="put"> 
              <string>_col1</string> 
-             <object id="ExprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+             <object id="ExprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
               <void property="column"> 
                <string>_col1</string> 
               </void> 
@@ -1312,7 +1308,7 @@
             </void> 
             <void method="put"> 
              <string>_col0</string> 
-             <object id="ExprNodeColumnDesc4" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+             <object id="ExprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
               <void property="column"> 
                <string>_col0</string> 
               </void> 
@@ -1331,10 +1327,10 @@
             <void property="colList"> 
              <object class="java.util.ArrayList"> 
               <void method="add"> 
-               <object idref="ExprNodeColumnDesc4"/> 
+               <object idref="ExprNodeColumnDesc3"/> 
               </void> 
               <void method="add"> 
-               <object idref="ExprNodeColumnDesc3"/> 
+               <object idref="ExprNodeColumnDesc2"/> 
               </void> 
              </object> 
             </void> 
@@ -1412,7 +1408,7 @@
        <object class="java.util.HashMap"> 
         <void method="put"> 
          <string>_col0</string> 
-         <object id="ExprNodeColumnDesc5" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+         <object id="ExprNodeColumnDesc4" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
           <void property="column"> 
            <string>KEY._col0</string> 
           </void> 
@@ -1464,7 +1460,7 @@
         <void property="keys"> 
          <object class="java.util.ArrayList"> 
           <void method="add"> 
-           <object idref="ExprNodeColumnDesc5"/> 
+           <object idref="ExprNodeColumnDesc4"/> 
           </void> 
          </object> 
         </void> 
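
(Most of the churn in this file, and in groupby2.q.xml below, is mechanical:
the golden plans are written by java.beans.XMLEncoder, which assigns ids like
ExprNodeColumnDesc1 in encounter order and emits idref elements for repeated
references. Removing the reverted VALUE._col0 mapping shifts every later id
and idref down, hence the long rename-only hunks. A self-contained sketch of
the id/idref behavior; java.awt.Point is just a convenient bean, nothing
Hive-specific:)

    import java.beans.XMLEncoder;
    import java.io.BufferedOutputStream;
    import java.io.FileOutputStream;
    import java.util.ArrayList;

    public class IdRefDemo {
      public static void main(String[] args) throws Exception {
        java.awt.Point shared = new java.awt.Point(1, 2);
        ArrayList<Object> list = new ArrayList<Object>();
        list.add(shared);
        list.add(shared);                 // serialized as <object idref="Point0"/>
        XMLEncoder enc = new XMLEncoder(
            new BufferedOutputStream(new FileOutputStream("demo.xml")));
        enc.writeObject(list);            // first occurrence gets id="Point0"
        enc.close();
      }
    }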

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml?rev=1392193&r1=1392192&r2=1392193&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml Mon Oct  1 03:52:10 2012
@@ -217,56 +217,6 @@
                         </void> 
                        </object> 
                       </void> 
-                      <void method="put"> 
-                       <string>VALUE._col1</string> 
-                       <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>_col3</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string></string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                          <void property="typeName"> 
-                           <string>double</string> 
-                          </void> 
-                         </object> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>KEY._col1:0._col0</string> 
-                       <object id="ExprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>_col1</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string></string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object idref="PrimitiveTypeInfo0"/> 
-                        </void> 
-                       </object> 
-                      </void> 
-                      <void method="put"> 
-                       <string>VALUE._col0</string> 
-                       <object id="ExprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                        <void property="column"> 
-                         <string>_col2</string> 
-                        </void> 
-                        <void property="tabAlias"> 
-                         <string></string> 
-                        </void> 
-                        <void property="typeInfo"> 
-                         <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
-                          <void property="typeName"> 
-                           <string>bigint</string> 
-                          </void> 
-                         </object> 
-                        </void> 
-                       </object> 
-                      </void> 
                      </object> 
                     </void> 
                     <void property="conf"> 
@@ -288,7 +238,17 @@
                          <object idref="ExprNodeColumnDesc0"/> 
                         </void> 
                         <void method="add"> 
-                         <object idref="ExprNodeColumnDesc2"/> 
+                         <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                          <void property="column"> 
+                           <string>_col1</string> 
+                          </void> 
+                          <void property="tabAlias"> 
+                           <string></string> 
+                          </void> 
+                          <void property="typeInfo"> 
+                           <object idref="PrimitiveTypeInfo0"/> 
+                          </void> 
+                         </object> 
                         </void> 
                        </object> 
                       </void> 
@@ -360,10 +320,38 @@
                       <void property="valueCols"> 
                        <object class="java.util.ArrayList"> 
                         <void method="add"> 
-                         <object idref="ExprNodeColumnDesc3"/> 
+                         <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                          <void property="column"> 
+                           <string>_col2</string> 
+                          </void> 
+                          <void property="tabAlias"> 
+                           <string></string> 
+                          </void> 
+                          <void property="typeInfo"> 
+                           <object id="PrimitiveTypeInfo1" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                            <void property="typeName"> 
+                             <string>bigint</string> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
                         </void> 
                         <void method="add"> 
-                         <object idref="ExprNodeColumnDesc1"/> 
+                         <object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                          <void property="column"> 
+                           <string>_col3</string> 
+                          </void> 
+                          <void property="tabAlias"> 
+                           <string></string> 
+                          </void> 
+                          <void property="typeInfo"> 
+                           <object id="PrimitiveTypeInfo2" class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"> 
+                            <void property="typeName"> 
+                             <string>double</string> 
+                            </void> 
+                           </object> 
+                          </void> 
+                         </object> 
                         </void> 
                        </object> 
                       </void> 
@@ -454,7 +442,7 @@
                            <string>VALUE._col0</string> 
                           </void> 
                           <void property="type"> 
-                           <object idref="PrimitiveTypeInfo2"/> 
+                           <object idref="PrimitiveTypeInfo1"/> 
                           </void> 
                          </object> 
                         </void> 
@@ -464,7 +452,7 @@
                            <string>VALUE._col1</string> 
                           </void> 
                           <void property="type"> 
-                           <object idref="PrimitiveTypeInfo1"/> 
+                           <object idref="PrimitiveTypeInfo2"/> 
                           </void> 
                          </object> 
                         </void> 
@@ -833,7 +821,7 @@
                        <string></string> 
                       </void> 
                       <void property="type"> 
-                       <object idref="PrimitiveTypeInfo2"/> 
+                       <object idref="PrimitiveTypeInfo1"/> 
                       </void> 
                      </object> 
                     </void> 
@@ -846,7 +834,7 @@
                        <string></string> 
                       </void> 
                       <void property="type"> 
-                       <object idref="PrimitiveTypeInfo1"/> 
+                       <object idref="PrimitiveTypeInfo2"/> 
                       </void> 
                      </object> 
                     </void> 
@@ -873,7 +861,7 @@
                  <string>src</string> 
                 </void> 
                 <void property="typeInfo"> 
-                 <object idref="PrimitiveTypeInfo2"/> 
+                 <object idref="PrimitiveTypeInfo1"/> 
                 </void> 
                </object> 
               </void> 
@@ -1096,7 +1084,7 @@
                <string>src</string> 
               </void> 
               <void property="type"> 
-               <object idref="PrimitiveTypeInfo2"/> 
+               <object idref="PrimitiveTypeInfo1"/> 
               </void> 
              </object> 
             </void> 
@@ -1435,7 +1423,7 @@
                      <string></string> 
                     </void> 
                     <void property="type"> 
-                     <object idref="PrimitiveTypeInfo2"/> 
+                     <object idref="PrimitiveTypeInfo1"/> 
                     </void> 
                    </object> 
                   </void> 
@@ -1489,7 +1477,7 @@
                    <string></string> 
                   </void> 
                   <void property="typeInfo"> 
-                   <object idref="PrimitiveTypeInfo1"/> 
+                   <object idref="PrimitiveTypeInfo2"/> 
                   </void> 
                  </object> 
                 </void> 
@@ -1515,7 +1503,7 @@
             </void> 
             <void method="put"> 
              <string>_col1</string> 
-             <object id="ExprNodeColumnDesc4" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+             <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
               <void property="column"> 
                <string>_col1</string> 
               </void> 
@@ -1523,13 +1511,13 @@
                <string></string> 
               </void> 
               <void property="typeInfo"> 
-               <object idref="PrimitiveTypeInfo2"/> 
+               <object idref="PrimitiveTypeInfo1"/> 
               </void> 
              </object> 
             </void> 
             <void method="put"> 
              <string>_col0</string> 
-             <object id="ExprNodeColumnDesc5" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+             <object id="ExprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
               <void property="column"> 
                <string>_col0</string> 
               </void> 
@@ -1548,10 +1536,10 @@
             <void property="colList"> 
              <object class="java.util.ArrayList"> 
               <void method="add"> 
-               <object idref="ExprNodeColumnDesc5"/> 
+               <object idref="ExprNodeColumnDesc2"/> 
               </void> 
               <void method="add"> 
-               <object idref="ExprNodeColumnDesc4"/> 
+               <object idref="ExprNodeColumnDesc1"/> 
               </void> 
               <void method="add"> 
                <object idref="ExprNodeGenericFuncDesc2"/> 
@@ -1625,7 +1613,7 @@
                  <string>_col1</string> 
                 </void> 
                 <void property="type"> 
-                 <object idref="PrimitiveTypeInfo2"/> 
+                 <object idref="PrimitiveTypeInfo1"/> 
                 </void> 
                </object> 
               </void> 
@@ -1654,7 +1642,7 @@
        <object class="java.util.HashMap"> 
         <void method="put"> 
          <string>_col0</string> 
-         <object id="ExprNodeColumnDesc6" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+         <object id="ExprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
           <void property="column"> 
            <string>KEY._col0</string> 
           </void> 
@@ -1722,7 +1710,7 @@
                  <string>VALUE._col1</string> 
                 </void> 
                 <void property="typeInfo"> 
-                 <object idref="PrimitiveTypeInfo1"/> 
+                 <object idref="PrimitiveTypeInfo2"/> 
                 </void> 
                </object> 
               </void> 
@@ -1738,7 +1726,7 @@
         <void property="keys"> 
          <object class="java.util.ArrayList"> 
           <void method="add"> 
-           <object idref="ExprNodeColumnDesc6"/> 
+           <object idref="ExprNodeColumnDesc3"/> 
           </void> 
          </object> 
         </void> 
@@ -1817,7 +1805,7 @@
              <string></string> 
             </void> 
             <void property="type"> 
-             <object idref="PrimitiveTypeInfo2"/> 
+             <object idref="PrimitiveTypeInfo1"/> 
             </void> 
            </object> 
           </void> 
@@ -1830,7 +1818,7 @@
              <string></string> 
             </void> 
             <void property="type"> 
-             <object idref="PrimitiveTypeInfo1"/> 
+             <object idref="PrimitiveTypeInfo2"/> 
             </void> 
            </object> 
           </void>