Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2008/10/21 20:30:34 UTC

svn commit: r706708 [5/13] - in /hadoop/core/branches/branch-0.19: ./ src/contrib/hive/ src/contrib/hive/bin/ src/contrib/hive/cli/src/java/org/apache/hadoop/hive/cli/ src/contrib/hive/common/src/java/org/apache/hadoop/hive/conf/ src/contrib/hive/conf/...

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/exprNodeFieldDesc.java Tue Oct 21 11:29:18 2008
@@ -25,13 +25,18 @@
 public class exprNodeFieldDesc extends exprNodeDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   exprNodeDesc desc;
-  String fieldName;
+  String fieldName;
+  
+  // Used to support a.b where a is a list of struct that contains a field called b.
+  // a.b will return an array that contains field b of all elements of array a. 
+  Boolean isList;
   
   public exprNodeFieldDesc() {}
-  public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc, String fieldName) {
+  public exprNodeFieldDesc(TypeInfo typeInfo, exprNodeDesc desc, String fieldName, Boolean isList) {
     super(typeInfo);
     this.desc = desc;
-    this.fieldName = fieldName;    
+    this.fieldName = fieldName;
+    this.isList = isList;
   }
   
   public exprNodeDesc getDesc() {
@@ -45,7 +50,14 @@
   }
   public void setFieldName(String fieldName) {
     this.fieldName = fieldName;
-  }
+  }
+  public Boolean getIsList() {
+    return isList;
+  }
+  public void setIsList(Boolean isList) {
+    this.isList = isList;
+  }
+  
   @Override
   public String toString() {
     return this.desc.toString() + "." + this.fieldName;

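The new isList flag distinguishes a plain struct field access (a.b where a is a struct) from field access over a list of structs, where a.b fans out to the array of b values. A rough sketch, not part of this commit, of how the extended constructor would be called; the child column and its type are hypothetical, and listOfStructTypeInfo is assumed to have been built elsewhere via TypeInfoFactory:

    // Hypothetical column "a" of type array<struct<b:string>>.
    exprNodeDesc a = new exprNodeColumnDesc(listOfStructTypeInfo, "a");
    exprNodeDesc aDotB = new exprNodeFieldDesc(
        TypeInfoFactory.getListTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo(String.class)),
        a,            // expression producing the list of structs
        "b",          // field looked up inside each struct element
        Boolean.TRUE  // isList: result is the array of all b values, one per element of a
    );
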
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/loadFileDesc.java Tue Oct 21 11:29:18 2008
@@ -19,22 +19,28 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.util.List;
+
 import org.apache.hadoop.hive.ql.plan.loadDesc;
 
 public class loadFileDesc extends loadDesc implements Serializable {
   private static final long serialVersionUID = 1L;
   private String targetDir;
   private boolean isDfsDir;
+  // list of columns, comma separated
+  private String  columns;
 
   public loadFileDesc() { }
   public loadFileDesc(
     final String sourceDir,
     final String targetDir,
-    final boolean isDfsDir) {
+    final boolean isDfsDir, 
+    final String  columns) {
 
     super(sourceDir);
     this.targetDir = targetDir;
     this.isDfsDir = isDfsDir;
+    this.columns = columns;
   }
   
   @explain(displayName="destination")
@@ -52,4 +58,18 @@
   public void setIsDfsDir(final boolean isDfsDir) {
     this.isDfsDir = isDfsDir;
   }
+  
+	/**
+	 * @return the columns
+	 */
+	public String getColumns() {
+		return columns;
+	}
+	
+	/**
+	 * @param columns the columns to set
+	 */
+	public void setColumns(String columns) {
+		this.columns = columns;
+	}
 }

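Since the column list travels as a plain comma-separated string, a caller passes it straight through the new constructor argument. A minimal sketch, not from this commit, with hypothetical paths and column names:

    loadFileDesc lfd = new loadFileDesc(
        "/tmp/hive-scratch/10002",    // sourceDir (hypothetical)
        "/user/hive/warehouse/dest",  // targetDir (hypothetical)
        true,                         // isDfsDir
        "key,value");                 // columns, comma separated
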
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/mapredWork.java Tue Oct 21 11:29:18 2008
@@ -49,6 +49,7 @@
   private Integer numReduceTasks;
   
   private boolean needsTagging;
+  private boolean inferNumReducers;
 
   public mapredWork() { }
   public mapredWork(
@@ -197,4 +198,13 @@
   public void setNeedsTagging(boolean needsTagging) {
     this.needsTagging = needsTagging;
   }
+
+  public boolean getInferNumReducers() {
+    return this.inferNumReducers;
+  }
+  
+  public void setInferNumReducers(boolean inferNumReducers) {
+    this.inferNumReducers = inferNumReducers;
+  }
+
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/reduceSinkDesc.java Tue Oct 21 11:29:18 2008
@@ -38,33 +38,25 @@
   // If the value is -1, then data will go to a random reducer 
   private int numPartitionFields;
   
-  public reduceSinkDesc() { }
+  private boolean inferNumReducers;
+  private int numReducers;
 
-  public reduceSinkDesc
-    (final java.util.ArrayList<exprNodeDesc> keyCols,
-     final java.util.ArrayList<exprNodeDesc> valueCols,
-     final int numPartitionFields,
-     final tableDesc keySerializeInfo,
-     final tableDesc valueSerializeInfo) {
-    this.keyCols = keyCols;
-    this.valueCols = valueCols;
-    this.tag = -1;
-    this.numPartitionFields = numPartitionFields;
-    this.keySerializeInfo = keySerializeInfo;
-    this.valueSerializeInfo = valueSerializeInfo;
-  }
+  public reduceSinkDesc() { }
 
   public reduceSinkDesc
     (java.util.ArrayList<exprNodeDesc> keyCols,
      java.util.ArrayList<exprNodeDesc> valueCols,
      int tag,
      int numPartitionFields,
+     int numReducers,
+     boolean inferNumReducers,
      final tableDesc keySerializeInfo,
      final tableDesc valueSerializeInfo) {
     this.keyCols = keyCols;
     this.valueCols = valueCols;
-    assert tag != -1;
     this.tag = tag;
+    this.numReducers = numReducers;
+    this.inferNumReducers = inferNumReducers;
     this.numPartitionFields = numPartitionFields;
     this.keySerializeInfo = keySerializeInfo;
     this.valueSerializeInfo = valueSerializeInfo;
@@ -104,6 +96,20 @@
     this.tag = tag;
   }
 
+  public boolean getInferNumReducers() {
+    return this.inferNumReducers;
+  }
+  public void setInferNumReducers(boolean inferNumReducers) {
+    this.inferNumReducers = inferNumReducers;
+  }
+
+  public int getNumReducers() {
+    return this.numReducers;
+  }
+  public void setNumReducers(int numReducers) {
+    this.numReducers = numReducers;
+  }
+
   public tableDesc getKeySerializeInfo() {
     return keySerializeInfo;
   }

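The single remaining constructor now carries the reducer hints alongside the tag, and the old assert has been dropped, so -1 is a legal tag for the single-stream case. A hedged sketch of a sink that leaves the reducer count to the planner; the key/value lists and tableDesc values are placeholders, and the -1 convention for "no fixed reducer count" is inferred from the TestExecDriver changes further down:

    // keyCols, valueCols, keyInfo and valueInfo are assumed to be built elsewhere.
    reduceSinkDesc sink = new reduceSinkDesc(
        keyCols,    // java.util.ArrayList<exprNodeDesc>
        valueCols,  // java.util.ArrayList<exprNodeDesc>
        -1,         // tag: -1 allowed now that the assert is gone
        1,          // numPartitionFields
        -1,         // numReducers: not fixed
        true,       // inferNumReducers: let the compiler estimate the count
        keyInfo,    // tableDesc for the key
        valueInfo); // tableDesc for the value
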
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/plan/tableDesc.java Tue Oct 21 11:29:18 2008
@@ -47,7 +47,7 @@
   public Class<? extends Deserializer> getDeserializerClass() {
     return this.deserializerClass;
   }
-  public void setDeserializerClass(final Class<? extends Deserializer> serdeClass) {
+  public void setDeserializerClass(final Class<? extends SerDe> serdeClass) {
     this.deserializerClass = serdeClass;
   }
   public Class<? extends InputFormat> getInputFileFormatClass() {

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/typeinfo/StructTypeInfo.java Tue Oct 21 11:29:18 2008
@@ -95,12 +95,14 @@
   }
   
   public TypeInfo getStructFieldTypeInfo(String field) {
+    String fieldLowerCase = field.toLowerCase();
     for(int i=0; i<allStructFieldNames.size(); i++) {
-      if (field.equals(allStructFieldNames.get(i))) {
+      if (fieldLowerCase.equals(allStructFieldNames.get(i))) {
         return allStructFieldTypeInfos.get(i);
       }
     }
-    throw new RuntimeException("cannot find field " + field + " in " + allStructFieldNames);
+    throw new RuntimeException("cannot find field " + field + "(lowercase form: " 
+        + fieldLowerCase + ") in " + allStructFieldNames);
     // return null;
   }
   

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFAvg.java Tue Oct 21 11:29:18 2008
@@ -38,25 +38,31 @@
   }
 
   public boolean aggregate(String o) {
-    mSum += Double.parseDouble(o);
-    mCount ++;
+    if (o != null && !o.isEmpty()) {
+      mSum += Double.parseDouble(o);
+      mCount ++;
+    }
     return true;
   }
   
   public String evaluatePartial() {
-    return new Double(mSum).toString() + '/' + Long.valueOf(mCount).toString();
+    // This is SQL standard - average of zero items should be null.
+    return mCount == 0 ? null : String.valueOf(mSum) + '/' + String.valueOf(mCount);
   }
 
   public boolean aggregatePartial(String o) {
-    int pos = o.indexOf('/');
-    assert(pos != -1);
-    mSum += Double.parseDouble(o.substring(0, pos));
-    mCount += Long.parseLong(o.substring(pos+1));
+    if (o != null && !o.isEmpty()) {
+      int pos = o.indexOf('/');
+      assert(pos != -1);
+      mSum += Double.parseDouble(o.substring(0, pos));
+      mCount += Long.parseLong(o.substring(pos+1));
+    }
     return true;
   }
 
   public String evaluate() {
-    return new Double(mSum / mCount).toString();
+    // This is SQL standard - average of zero items should be null.
+    return mCount == 0 ? null : String.valueOf(mSum / mCount);
   }
 
 }

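With these guards, nulls and empty strings are skipped instead of failing Double.parseDouble, and an empty group propagates NULL end to end. A rough usage sketch, not from the commit; the no-arg constructor and init() call mirror the other UDAFs in this change set:

    UDAFAvg avg = new UDAFAvg();
    avg.init();
    avg.aggregate("4.0");
    avg.aggregate(null);                     // skipped
    avg.aggregate("");                       // skipped
    String partial = avg.evaluatePartial();  // "4.0/1" -- sum '/' count

    UDAFAvg merged = new UDAFAvg();
    merged.init();
    merged.aggregatePartial(partial);
    merged.aggregatePartial(null);           // partial from an empty group is null: skipped
    String result = merged.evaluate();       // "4.0"; null if every partial were empty
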
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFCount.java Tue Oct 21 11:29:18 2008
@@ -35,7 +35,10 @@
   }
   
   public boolean aggregate(Object o) {
-    mCount ++;
+    // Our SerDe between map/reduce boundary may convert MetadataTypedSerDe to 
+    if (o != null && !o.equals("")) {
+      mCount ++;
+    }
     return true;
   }
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMax.java Tue Oct 21 11:29:18 2008
@@ -25,6 +25,7 @@
 public class UDAFMax extends UDAF {
 
   private double mMax;
+  private boolean mEmpty;
   
   public UDAFMax() {
     super();
@@ -33,24 +34,31 @@
 
   public void init() {
     mMax = 0;
+    mEmpty = true;
   }
 
   public boolean aggregate(String o) {
-    mMax = Math.max(mMax, Double.parseDouble(o));
+    if (o != null && !o.isEmpty()) {
+      if (mEmpty) {
+        mMax = Double.parseDouble(o);
+        mEmpty = false;
+      } else {
+        mMax = Math.max(mMax, Double.parseDouble(o));
+      }
+    }
     return true;
   }
   
   public String evaluatePartial() {
-    return new Double(mMax).toString();
+    return mEmpty ? null : String.valueOf(mMax);
   }
 
   public boolean aggregatePartial(String o) {
-    mMax = Math.max(mMax, Double.parseDouble(o));
-    return true;
+    return aggregate(o);
   }
 
   public String evaluate() {
-    return new Double(mMax).toString();
+    return mEmpty ? null : String.valueOf(mMax);
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFMin.java Tue Oct 21 11:29:18 2008
@@ -25,6 +25,7 @@
 public class UDAFMin extends UDAF {
 
   private double mMin;
+  private boolean mEmpty;
   
   public UDAFMin() {
     super();
@@ -33,24 +34,31 @@
 
   public void init() {
     mMin = 0;
+    mEmpty = true;
   }
 
   public boolean aggregate(String o) {
-    mMin = Math.min(mMin, Double.parseDouble(o));
+    if (o != null && !o.isEmpty()) {
+      if (mEmpty) {
+        mMin = Double.parseDouble(o);
+        mEmpty = false;
+      } else {
+        mMin = Math.min(mMin, Double.parseDouble(o));
+      }
+    }
     return true;
   }
   
   public String evaluatePartial() {
-    return new Double(mMin).toString();
+    return mEmpty ? null : String.valueOf(mMin);
   }
 
   public boolean aggregatePartial(String o) {
-    mMin = Math.min(mMin, Double.parseDouble(o));
-    return true;
+    return aggregate(o);
   }
 
   public String evaluate() {
-    return new Double(mMin).toString();
+    return mEmpty ? null : String.valueOf(mMin);
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDAFSum.java Tue Oct 21 11:29:18 2008
@@ -25,6 +25,7 @@
 public class UDAFSum extends UDAF {
 
   private double mSum;
+  private boolean mEmpty;
   
   public UDAFSum() {
     super();
@@ -33,24 +34,33 @@
 
   public void init() {
     mSum = 0;
+    mEmpty = true;
   }
 
   public boolean aggregate(String o) {
-    mSum += Double.parseDouble(o);
+    if (o != null && !o.isEmpty()) {
+      mSum += Double.parseDouble(o);
+      mEmpty = false;
+    }
     return true;
   }
   
   public String evaluatePartial() {
-    return new Double(mSum).toString();
+    // This is SQL standard - sum of zero items should be null.
+    return mEmpty ? null : new Double(mSum).toString();
   }
 
   public boolean aggregatePartial(String o) {
-    mSum += Double.parseDouble(o);
+    if (o != null && !o.isEmpty()) {
+      mSum += Double.parseDouble(o);
+      mEmpty = false;
+    }
     return true;
   }
 
   public String evaluate() {
-    return new Double(mSum).toString();
+    // This is SQL standard - sum of zero items should be null.
+    return mEmpty ? null : new Double(mSum).toString();
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFBaseCompare.java Tue Oct 21 11:29:18 2008
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public abstract class UDFBaseCompare extends UDF {
+public abstract class UDFBaseCompare implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFBaseCompare.class.getName());
 
@@ -38,28 +38,26 @@
    *  String to double and the Number to double, and then compare.
    */
   public Boolean evaluate(String a, Number b)  {
-    Boolean r = null;
-    if ((a == null) || (b == null)) {
-      r = null;
-    } else {
-      r = evaluate(Double.valueOf(a), new Double(b.doubleValue()));
+    Double aDouble = null;
+    try {
+      aDouble = Double.valueOf(a);
+    } catch (Exception e){
+      // do nothing: aDouble will be null.
     }
-    // LOG.info("evaluate(" + a + "," + b + ")=" + r);
-    return r;
+    return evaluate(aDouble, new Double(b.doubleValue()));
   }
 
   /** If one of the argument is a String and the other is a Number, convert
    *  String to double and the Number to double, and then compare.
    */
   public Boolean evaluate(Number a, String b)  {
-    Boolean r = null;
-    if ((a == null) || (b == null)) {
-      r = null;
-    } else {
-      r = evaluate(new Double(a.doubleValue()), Double.valueOf(b));
+    Double bDouble = null;
+    try {
+      bDouble = Double.valueOf(b);
+    } catch (Exception e){
+      // do nothing: bDouble will be null.
     }
-    // LOG.info("evaluate(" + a + "," + b + ")=" + r);
-    return r;
+    return evaluate(new Double(a.doubleValue()), bDouble);
   }
   
 }

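With the try/catch in place, comparing a non-numeric string against a number yields NULL rather than killing the task with a NumberFormatException. A heavily hedged sketch: the subclass below is purely hypothetical and assumes UDFBaseCompare exposes an evaluate(Double, Double) that the two overloads above dispatch to:

    // Hypothetical subclass for illustration only.
    public class UDFNumGt extends UDFBaseCompare {
      public Boolean evaluate(Double a, Double b) {
        return (a == null || b == null) ? null
            : Boolean.valueOf(a.doubleValue() > b.doubleValue());
      }
    }

    UDFNumGt gt = new UDFNumGt();
    gt.evaluate("12.5", Integer.valueOf(3));  // Boolean.TRUE: the string parses cleanly
    gt.evaluate("abc", Integer.valueOf(3));   // null: Double.valueOf fails, aDouble stays null
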
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFConcat.java Tue Oct 21 11:29:18 2008
@@ -21,7 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFConcat extends UDF {
+public class UDFConcat implements UDF {
 
   public UDFConcat() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDefaultSampleHashFn.java Tue Oct 21 11:29:18 2008
@@ -22,7 +22,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-public class UDFDefaultSampleHashFn extends UDF {
+public class UDFDefaultSampleHashFn implements UDF {
   protected final Log LOG;
 
   public UDFDefaultSampleHashFn() {

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLTrim.java Tue Oct 21 11:29:18 2008
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLTrim extends UDF {
+public class UDFLTrim implements UDF {
 
   public UDFLTrim() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLike.java Tue Oct 21 11:29:18 2008
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLike extends UDF {
+public class UDFLike implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFLike.class.getName());
   private String lastLikePattern = null;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFLower.java Tue Oct 21 11:29:18 2008
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFLower extends UDF {
+public class UDFLower implements UDF {
 
   public UDFLower() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPAnd.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPAnd extends UDF {
+public class UDFOPAnd implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPAnd");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitAnd.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitAnd extends UDF {
+public class UDFOPBitAnd implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitAnd.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitNot.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitNot extends UDF {
+public class UDFOPBitNot implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitNot.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitOr.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitOr extends UDF {
+public class UDFOPBitOr implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPBitOr");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPBitXor.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPBitXor extends UDF {
+public class UDFOPBitXor implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPBitXor.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPDivide.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPDivide extends UDF {
+public class UDFOPDivide implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPDivide");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMinus.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMinus extends UDF {
+public class UDFOPMinus implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMinus");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMod.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMod extends UDF {
+public class UDFOPMod implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMod");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPMultiply.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPMultiply extends UDF {
+public class UDFOPMultiply implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPMultiply");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNot.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNot extends UDF {
+public class UDFOPNot implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNot");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNotNull.java Tue Oct 21 11:29:18 2008
@@ -23,23 +23,15 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNotNull extends UDF {
+public class UDFOPNotNull implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNotNull");
 
   public UDFOPNotNull() {
   }
 
-  public Boolean evaluate(Long a)  {
-    return Boolean.valueOf(a == null ? false : true);
-  }
-
-  public Boolean evaluate(Number a)  {
-    return Boolean.valueOf(a == null ? false : true);
-  }
-
-  public Boolean evaluate(String a)  {
-    return Boolean.valueOf(a == null ? false : true);
+  public Boolean evaluate(Object a)  {
+    return Boolean.valueOf(a != null);
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPNull.java Tue Oct 21 11:29:18 2008
@@ -23,23 +23,15 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPNull extends UDF {
+public class UDFOPNull implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPNull");
 
   public UDFOPNull() {
   }
 
-  public Boolean evaluate(Long a)  {
-    return Boolean.valueOf(a == null ? true : false);
-  }
-
-  public Boolean evaluate(Number a)  {
-    return Boolean.valueOf(a == null ? true : false);
-  }
-
-  public Boolean evaluate(String a)  {
-    return Boolean.valueOf(a == null ? true : false);
+  public Boolean evaluate(Object a)  {
+    return Boolean.valueOf(a == null);
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPOr.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFOPOr extends UDF {
+public class UDFOPOr implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFOPOr.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFOPPlus.java Tue Oct 21 11:29:18 2008
@@ -32,7 +32,7 @@
  * The case of int + double will be handled by implicit type casting using 
  * UDFRegistry.implicitConvertable method. 
  */
-public class UDFOPPlus extends UDF {
+public class UDFOPPlus implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFOPPlus");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRTrim.java Tue Oct 21 11:29:18 2008
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRTrim extends UDF {
+public class UDFRTrim implements UDF {
 
   public UDFRTrim() {
   }
@@ -33,7 +33,7 @@
     if (s == null) {
       return null;
     }
-    return StringUtils.strip(s, " ");
+    return StringUtils.stripEnd(s, " ");
   }
 
 }

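This fixes a swap between RTRIM and TRIM: the commons-lang strip(String, String) call trims both ends, while stripEnd trims only the tail (the companion UDFTrim change further down swaps the other direction). For reference, the behavior of the org.apache.commons.lang.StringUtils calls involved:

    StringUtils.strip("  hive  ", " ");      // "hive"    -- both ends (what TRIM wants)
    StringUtils.stripEnd("  hive  ", " ");   // "  hive"  -- trailing only (RTRIM)
    StringUtils.stripStart("  hive  ", " "); // "hive  "  -- leading only (LTRIM)
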
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExp.java Tue Oct 21 11:29:18 2008
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRegExp extends UDF {
+public class UDFRegExp implements UDF {
 
   private String lastRegex = null;
   private Pattern p = null;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFRegExpReplace.java Tue Oct 21 11:29:18 2008
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFRegExpReplace extends UDF {
+public class UDFRegExpReplace implements UDF {
 
   private String lastRegex = null;
   private Pattern p = null;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrEq.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrEq extends UDF {
+public class UDFStrEq implements UDF {
 
   private static Log LOG = LogFactory.getLog("org.apache.hadoop.hive.ql.udf.UDFStrEq");
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGe.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrGe extends UDF {
+public class UDFStrGe implements UDF {
 
   public UDFStrGe() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrGt.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrGt extends UDF {
+public class UDFStrGt implements UDF {
 
   public UDFStrGt() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLe.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrLe extends UDF {
+public class UDFStrLe implements UDF {
 
   public UDFStrLe() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrLt.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrLt extends UDF {
+public class UDFStrLt implements UDF {
 
   public UDFStrLt() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFStrNe.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFStrNe extends UDF {
+public class UDFStrNe implements UDF {
 
   public UDFStrNe() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSubstr.java Tue Oct 21 11:29:18 2008
@@ -21,7 +21,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFSubstr extends UDF {
+public class UDFSubstr implements UDF {
 
   public UDFSubstr() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToBoolean extends UDF {
+public class UDFToBoolean implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToBoolean.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToByte extends UDF {
+public class UDFToByte implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToByte.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDate.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToDate extends UDF {
+public class UDFToDate implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToDate.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToDouble extends UDF {
+public class UDFToDouble implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToDouble.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToFloat extends UDF {
+public class UDFToFloat implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToFloat.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToInteger extends UDF {
+public class UDFToInteger implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToInteger.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToLong extends UDF {
+public class UDFToLong implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToLong.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToString.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 import org.apache.hadoop.hive.ql.exec.UDF;
 
 
-public class UDFToString extends UDF {
+public class UDFToString implements UDF {
 
   private static Log LOG = LogFactory.getLog(UDFToString.class.getName());
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFTrim.java Tue Oct 21 11:29:18 2008
@@ -24,7 +24,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFTrim extends UDF {
+public class UDFTrim implements UDF {
 
   public UDFTrim() {
   }
@@ -33,7 +33,7 @@
     if (s == null) {
       return null;
     }
-    return StringUtils.stripEnd(s, " ");
+    return StringUtils.strip(s, " ");
   }
 
 }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFUpper.java Tue Oct 21 11:29:18 2008
@@ -22,7 +22,7 @@
 import java.util.regex.Pattern;
 import java.util.regex.Matcher;
 
-public class UDFUpper extends UDF {
+public class UDFUpper implements UDF {
 
   public UDFUpper() {
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Tue Oct 21 11:29:18 2008
@@ -53,7 +53,7 @@
 import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.thrift.Complex;
+import org.apache.hadoop.hive.serde2.thrift.test.Complex;
 import org.apache.hadoop.hive.serde.Constants;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -277,7 +277,7 @@
     Table srcThrift = new Table("src_thrift");
     srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
     srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
-    srcThrift.setSerializationLib(ThriftDeserializer.shortName());
+    srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
     srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
     srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
     db.createTable(srcThrift);
@@ -364,7 +364,6 @@
     CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
 
     ss.in = System.in;
-    ss.err = System.err;
 
     File qf = new File(outDir, tname);
     File outf = null;
@@ -372,6 +371,7 @@
     outf = new File(outf, qf.getName().concat(".out"));
     FileOutputStream fo = new FileOutputStream(outf);
     ss.out = new PrintStream(fo, true, "UTF-8");
+    ss.err = ss.out;
     ss.setIsSilent(true);
     cliDriver = new CliDriver(ss);
     SessionState.start(ss);
@@ -644,7 +644,10 @@
   public List<Task<? extends Serializable>> analyzeAST(CommonTree ast) throws Exception {
 
     // Do semantic analysis and plan generation
-    sem.analyze(ast, new Context(conf));
+    Context ctx = new Context(conf);
+    ctx.makeScratchDir();
+    sem.analyze(ast, ctx);
+    ctx.removeScratchDir();
     return sem.getRootTasks();
   }
 

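The analyzeAST() change above gives the Context an explicit scratch-directory lifecycle around semantic analysis. If the scratch directory should also be cleaned up when analyze() throws, the same method could be written with a finally block; this is a sketch only, assuming the Context methods behave exactly as used in the hunk above:

    public List<Task<? extends Serializable>> analyzeAST(CommonTree ast) throws Exception {
      // Do semantic analysis and plan generation
      Context ctx = new Context(conf);
      ctx.makeScratchDir();
      try {
        sem.analyze(ast, ctx);
      } finally {
        // Remove the scratch directory even if semantic analysis fails.
        ctx.removeScratchDir();
      }
      return sem.getRootTasks();
    }
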
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java Tue Oct 21 11:29:18 2008
@@ -203,8 +203,7 @@
     Operator<reduceSinkDesc> op1 = OperatorFactory.get
       (PlanUtils.getReduceSinkDesc
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
-        Utilities.makeList(new exprNodeColumnDesc(String.class, "value")),
-        1));
+        Utilities.makeList(new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
 
@@ -228,8 +227,7 @@
       (PlanUtils.getReduceSinkDesc
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
         Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
-                           new exprNodeColumnDesc(String.class, "value")),
-        1));
+                           new exprNodeColumnDesc(String.class, "value")), -1, 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
 
@@ -260,8 +258,7 @@
       (PlanUtils.getReduceSinkDesc
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
         Utilities.makeList
-        (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0),
-        1));
+        (new exprNodeColumnDesc(String.class, "value")), Byte.valueOf((byte)0), 1, -1, false));
 
     Utilities.addMapWork(mr, src, "a", op1);
 
@@ -270,7 +267,7 @@
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
         Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),
         Byte.valueOf((byte)1),
-        Integer.MAX_VALUE));
+        Integer.MAX_VALUE, -1, false));
 
     Utilities.addMapWork(mr, src2, "b", op2);
 
@@ -291,7 +288,8 @@
              new exprNodeColumnDesc(TypeInfoFactory.getListTypeInfo(
                  TypeInfoFactory.getPrimitiveTypeInfo(String.class)),
                  Utilities.ReduceField.VALUE.toString()),
-             "0"))), op4);
+             "0",
+             false))), op4);
 
     mr.setReducer(op5);
   }
@@ -307,7 +305,7 @@
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
         Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
                            new exprNodeColumnDesc(String.class, "tvalue")),
-        1));
+        -1, 1, -1, false));
 
     Operator<scriptDesc> op0 = OperatorFactory.get
     (new scriptDesc("/bin/cat",
@@ -343,7 +341,7 @@
        (Utilities.makeList(new exprNodeColumnDesc(String.class, "0")),
         Utilities.makeList(new exprNodeColumnDesc(String.class, "0"),
                            new exprNodeColumnDesc(String.class, "1")),
-        1));
+        -1, 1, -1, false));
 
     Operator<selectDesc> op4 = OperatorFactory.get(new selectDesc(
                                      Utilities.makeList(new exprNodeColumnDesc(String.class, "key"),
@@ -373,7 +371,7 @@
         Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey")),
         Utilities.makeList(new exprNodeColumnDesc(String.class, "tkey"),
                            new exprNodeColumnDesc(String.class, "tvalue")),
-        1));
+        -1, 1, -1, false));
 
     Operator<scriptDesc> op0 = OperatorFactory.get
       (new scriptDesc("\'/bin/cat\'",

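The getReduceSinkDesc() calls above gain additional positional arguments that are easy to misread in the diff. Re-stating the first call with the arguments labelled; the parameter names in the comments are inferred from the call sites and from the 'tag' / '# partition fields' lines in the plan output later in this commit, not read from PlanUtils itself:

    Operator<reduceSinkDesc> op1 = OperatorFactory.get
      (PlanUtils.getReduceSinkDesc
       (Utilities.makeList(new exprNodeColumnDesc(String.class, "key")),    // key columns
        Utilities.makeList(new exprNodeColumnDesc(String.class, "value")),  // value columns
        -1,       // tag: -1 when the reduce sink has a single, untagged input
        1,        // presumably the number of partition fields taken from the key
        -1,       // presumably the number of reducers; -1 leaves it unspecified
        false));  // boolean flag whose meaning is not visible in this diff
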
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Tue Oct 21 11:29:18 2008
@@ -29,7 +29,7 @@
 import org.apache.hadoop.hive.metastore.DB;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.thrift.Complex;
+import org.apache.hadoop.hive.serde2.thrift.test.Complex;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.ThriftDeserializer;
 import org.apache.hadoop.hive.serde.Constants;
@@ -137,7 +137,8 @@
         // now that URI is set correctly, set the original table's uri and then compare the two tables
         tbl.setDataLocation(ft.getDataLocation());
         assertTrue("Tables  doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
-        assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName());
+        assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName());
+        assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), MetadataTypedColumnsetSerDe.class.getName());
       } catch (HiveException e) {
         e.printStackTrace();
         assertTrue("Unable to fetch table correctly: " + tableName, false);
@@ -195,7 +196,8 @@
         // now that URI is set correctly, set the original table's uri and then compare the two tables
         tbl.setDataLocation(ft.getDataLocation());
         assertTrue("Tables  doesn't match: " + tableName, ft.getTTable().equals(tbl.getTTable()));
-        assertEquals("Serde is not set correctly", tbl.getDeserializer().getShortName(), ft.getDeserializer().getShortName());
+        assertEquals("SerializationLib is not set correctly", tbl.getSerializationLib(), ThriftDeserializer.class.getName());
+        assertEquals("Serde is not set correctly", tbl.getDeserializer().getClass().getName(), ft.getDeserializer().getClass().getName());
       } catch (HiveException e) {
         System.err.println(StringUtils.stringifyException(e));
         assertTrue("Unable to fetch table correctly: " + tableName, false);

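Both TestHive assertions now identify the serde by fully-qualified class name rather than the removed getShortName() handle, matching the QTestUtil change earlier in this commit. A condensed sketch of the pattern, assuming the Table accessors used in the hunks above:

    // Producer side (QTestUtil): the serde is registered under its class name ...
    srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
    // ... consumer side (TestHive): the same class name is expected back from the metastore.
    assertEquals("SerializationLib is not set correctly",
                 tbl.getSerializationLib(), ThriftDeserializer.class.getName());
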
Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/org/apache/hadoop/hive/ql/udf/UDFTestLength.java Tue Oct 21 11:29:18 2008
@@ -23,7 +23,7 @@
 /**
  * A UDF for testing, which evaluates the length of a string.
  */
-public class UDFTestLength extends UDF {
+public class UDFTestLength implements UDF {
   public Integer evaluate(String s) {
     return s == null ? null : s.length();
   }

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input3.q Tue Oct 21 11:29:18 2008
@@ -1,3 +1,6 @@
+DROP TABLE TEST3a;
+DROP TABLE TEST3b;
+DROP TABLE TEST3c;
 CREATE TABLE TEST3a(A INT, B FLOAT); 
 DESCRIBE TEST3a; 
 CREATE TABLE TEST3b(A ARRAY<INT>, B FLOAT, C MAP<FLOAT, INT>); 
@@ -12,5 +15,9 @@
 ALTER TABLE TEST3b RENAME TO TEST3c;
 DESCRIBE TEST3c; 
 SHOW TABLES;
+EXPLAIN
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
+ALTER TABLE TEST3c REPLACE COLUMNS (R1 INT, R2 FLOAT);
+DESCRIBE EXTENDED TEST3c;
 DROP TABLE TEST3c;
 DROP TABLE TEST3a;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_part1.q Tue Oct 21 11:29:18 2008
@@ -1,6 +1,6 @@
 CREATE TABLE dest1(key INT, value STRING, hr STRING, ds STRING);
 
-EXPLAIN
+EXPLAIN EXTENDED
 FROM srcpart
 INSERT OVERWRITE TABLE dest1 SELECT srcpart.key, srcpart.value, srcpart.hr, srcpart.ds WHERE srcpart.key < 100 and srcpart.ds = '2008-04-08' and srcpart.hr = '12';
 

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/input_testxpath.q Tue Oct 21 11:29:18 2008
@@ -1,10 +1,10 @@
-CREATE TABLE dest1(key INT, value STRING);
+CREATE TABLE dest1(key INT, value STRING, mapvalue STRING);
 
 EXPLAIN
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'];
 
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring;
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2'];
 
 SELECT dest1.* FROM dest1;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/inputddl4.q Tue Oct 21 11:29:18 2008
@@ -1,4 +1,5 @@
 -- a simple test to test sorted/clustered syntax
+DROP TABLE INPUTDDL4;
 CREATE TABLE INPUTDDL4(viewTime DATETIME, userid INT,
                        page_url STRING, referrer_url STRING, 
                        friends ARRAY<BIGINT>, properties MAP<STRING, STRING>,
@@ -7,4 +8,5 @@
     PARTITIONED BY(ds DATETIME, country STRING) 
     CLUSTERED BY(userid) SORTED BY(viewTime) INTO 32 BUCKETS;
 DESCRIBE INPUTDDL4;
+DESCRIBE EXTENDED INPUTDDL4;
 DROP TABLE INPUTDDL4;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/clientpositive/udf2.q Tue Oct 21 11:29:18 2008
@@ -1,10 +1,8 @@
-EXPLAIN
-CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength';
-
-CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength';
+CREATE TABLE dest1(c1 STRING);
 
-CREATE TABLE dest1(len INT);
+FROM src INSERT OVERWRITE TABLE dest1 SELECT '  abc  ' WHERE src.key = 86;
 
-FROM src INSERT OVERWRITE TABLE dest1 SELECT testlength(src.value);
+EXPLAIN
+SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1;
 
-SELECT dest1.* FROM dest1;
+SELECT '|', trim(dest1.c1), '|', rtrim(dest1.c1), '|', ltrim(dest1.c1), '|' FROM dest1;

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/queries/positive/input_testxpath.q Tue Oct 21 11:29:18 2008
@@ -1,2 +1,2 @@
 FROM src_thrift
-INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring
+INSERT OVERWRITE TABLE dest1 SELECT src_thrift.lint[1], src_thrift.lintstring[0].mystring, src_thrift.mstringstring['key_2']

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/cast1.q.out Tue Oct 21 11:29:18 2008
@@ -32,21 +32,20 @@
                       type: int
                 File Output Operator
                   table:
-                      name: dest1
-                      serde: simple_meta
                       input format: org.apache.hadoop.mapred.TextInputFormat
                       output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                      name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 5	5.0	5.0	5.0	5	false	1
-

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby1.q.out Tue Oct 21 11:29:18 2008
@@ -9,46 +9,51 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-          
-                expr: sum(VALUE.3)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: key
                     type: string
+              # partition fields: -1
+              tag: -1
+              value expressions:
                     expr: substr(value, 4)
                     type: string
-              # partition fields: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: sum(VALUE.0)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/748679827/1407352694.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
       Reduce Operator Tree:
         Group By Operator
+        
+              expr: sum(VALUE.0)
           keys:
                 expr: KEY.0
                 type: string
           mode: partial2
-        
-              expr: sum(VALUE.0)
           Select Operator
             expressions:
                   expr: 0
@@ -57,31 +62,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/178504461.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            value expressions:
-                  expr: 1
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0	0.0
@@ -393,4 +387,3 @@
 96	96.0
 97	194.0
 98	196.0
-

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby2.q.out Tue Oct 21 11:29:18 2008
@@ -9,46 +9,54 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-          
-                expr: count(DISTINCT KEY.0)
-                expr: sum(KEY.0)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: substr(value, 4)
-                    type: string
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: substr(key, 0, 1)
                     type: string
-              # partition fields: 1
+                    expr: substr(value, 4)
+                    type: string
+              # partition fields: 2147483647
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: count(DISTINCT KEY.1)
+              expr: sum(KEY.1)
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/307368091/808162418.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
+            value expressions:
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
       Reduce Operator Tree:
         Group By Operator
+        
+              expr: count(VALUE.0)
+              expr: sum(VALUE.1)
           keys:
                 expr: KEY.0
                 type: string
           mode: partial2
-        
-              expr: count(VALUE.0)
-              expr: sum(VALUE.1)
           Select Operator
             expressions:
                   expr: 0
@@ -59,33 +67,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/29356866.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            value expressions:
-                  expr: 1
-                  type: string
-                  expr: 2
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0	1	00.0
@@ -98,4 +93,3 @@
 7	6	7735.0
 8	8	8762.0
 9	7	91047.0
-

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby3.q.out Tue Oct 21 11:29:18 2008
@@ -9,44 +9,55 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            mode: partial1
-          
-                expr: avg(DISTINCT KEY.0)
-                expr: sum(KEY.0)
-                expr: avg(KEY.0)
-                expr: min(KEY.0)
-                expr: max(KEY.0)
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
                     expr: substr(value, 4)
                     type: string
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
-              # partition fields: 1
+              # partition fields: 2147483647
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+        
+              expr: avg(DISTINCT KEY.0)
+              expr: sum(KEY.0)
+              expr: avg(KEY.0)
+              expr: min(KEY.0)
+              expr: max(KEY.0)
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/629715569/118113569.10001 
+          Reduce Output Operator
+            # partition fields: 0
+            tag: -1
+            value expressions:
+                  expr: 0
+                  type: string
+                  expr: 1
+                  type: string
+                  expr: 2
+                  type: string
+                  expr: 3
+                  type: string
+                  expr: 4
+                  type: string
       Reduce Operator Tree:
         Group By Operator
-          mode: partial2
         
               expr: avg(VALUE.0)
               expr: sum(VALUE.1)
               expr: avg(VALUE.2)
               expr: min(VALUE.3)
               expr: max(VALUE.4)
+          mode: partial2
           Select Operator
             expressions:
                   expr: 1
@@ -61,37 +72,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/52918796.10001 
-          Reduce Output Operator
-            tag: -1
-            value expressions:
-                  expr: 0
-                  type: string
-                  expr: 1
-                  type: string
-                  expr: 2
-                  type: string
-                  expr: 3
-                  type: string
-                  expr: 4
-                  type: string
-            # partition fields: 0
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 130091.0	260.182	256.10355987055016	498.0	0.0
-

Modified: hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out?rev=706708&r1=706707&r2=706708&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out (original)
+++ hadoop/core/branches/branch-0.19/src/contrib/hive/ql/src/test/results/clientpositive/groupby4.q.out Tue Oct 21 11:29:18 2008
@@ -9,34 +9,35 @@
 STAGE PLANS:
   Stage: Stage-1
     Map Reduce
-      Reduce Operator Tree:
-          Group By Operator
-            keys:
-                  expr: VALUE.2
-                  type: string
-            mode: partial1
-            File Output Operator
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
       Alias -> Map Operator Tree:
         src 
             Reduce Output Operator
-              tag: -1
               key expressions:
-                    expr: 0
-                    type: int
-              value expressions:
-                    expr: key
-                    type: string
-                    expr: value
-                    type: string
                     expr: substr(key, 0, 1)
                     type: string
               # partition fields: -1
+              tag: -1
+      Reduce Operator Tree:
+        Group By Operator
+          keys:
+                expr: KEY.0
+                type: string
+          mode: partial1
+          File Output Operator
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
 
   Stage: Stage-2
     Map Reduce
+      Alias -> Map Operator Tree:
+        /tmp/hive-njain/1561965178/525265780.10001 
+          Reduce Output Operator
+            key expressions:
+                  expr: 0
+                  type: string
+            # partition fields: 1
+            tag: -1
       Reduce Operator Tree:
         Group By Operator
           keys:
@@ -49,28 +50,20 @@
                   type: string
             File Output Operator
               table:
-                  name: dest1
-                  serde: simple_meta
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-      Alias -> Map Operator Tree:
-        /tmp/hive-zshao/64182502.10001 
-          Reduce Output Operator
-            tag: -1
-            key expressions:
-                  expr: 0
-                  type: string
-            # partition fields: 1
+                  serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                  name: dest1
 
   Stage: Stage-0
     Move Operator
       tables:
+            replace:
             table:
-                name: dest1
-                serde: simple_meta
                 input format: org.apache.hadoop.mapred.TextInputFormat
                 output format: org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat
-            replace:
+                serde: org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe
+                name: dest1
 
 
 0
@@ -83,4 +76,3 @@
 7
 8
 9
-