Posted to commits@hive.apache.org by gu...@apache.org on 2013/11/26 09:19:34 UTC

svn commit: r1545564 [6/22] - in /hive/branches/tez: ./ ant/ beeline/ bin/ cli/ common/ common/src/java/org/apache/hadoop/hive/common/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/java/org/apache/hadoop/hive/conf/ common/src/test/org/...

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java Tue Nov 26 08:19:25 2013
@@ -23,12 +23,15 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import java.lang.management.ManagementFactory;
+import java.lang.management.MemoryMXBean;
 import java.lang.reflect.Constructor;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -155,6 +158,88 @@ public class TestVectorGroupByOperator {
 
     return desc;
   }
+  
+  long outputRowCount = 0;
+  
+  @Test
+  public void testMemoryPressureFlush() throws HiveException {
+
+    Map<String, Integer> mapColumnNames = new HashMap<String, Integer>();
+    mapColumnNames.put("Key", 0);
+    mapColumnNames.put("Value", 1);
+    VectorizationContext ctx = new VectorizationContext(mapColumnNames, 2);
+
+    GroupByDesc desc = buildKeyGroupByDesc(ctx, "max",
+        "Value", TypeInfoFactory.longTypeInfo, 
+        "Key", TypeInfoFactory.longTypeInfo);
+    
+    // Set the memory threshold so that we get 100KB before we need to flush.
+    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
+    long maxMemory = memoryMXBean.getHeapMemoryUsage().getMax();
+    
+    float threshold = 100.0f * 1024.0f / maxMemory;
+    desc.setMemoryThreshold(threshold);
+
+    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);
+    
+    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
+    vgo.initialize(null, null);
+    
+    this.outputRowCount = 0;
+    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
+      @Override
+      public void inspectRow(Object row, int tag) throws HiveException {
+        ++outputRowCount;
+      }
+    });
+          
+    Iterable<Object> it = new Iterable<Object>() {
+      @Override
+      public Iterator<Object> iterator() {
+        return new Iterator<Object> () {
+          long value = 0;
+
+          @Override
+          public boolean hasNext() {
+            return true;
+          }
+
+          @Override
+          public Object next() {
+            return ++value;
+          }
+
+          @Override
+          public void remove() {
+          }
+        };
+      }
+    };
+    
+    FakeVectorRowBatchFromObjectIterables data = new FakeVectorRowBatchFromObjectIterables(
+        100,
+        new String[] {"long", "long"},
+        it,
+        it);
+
+    // The 'it' data source will produce data without ever ending.
+    // We want to see that memory pressure kicks in and some
+    // entries held by the VGBY are flushed.
+    long countRowsProduced = 0;
+    for (VectorizedRowBatch unit: data) {
+      countRowsProduced += 100;
+      vgo.processOp(unit,  0);
+      if (0 < outputRowCount) {
+        break;
+      }
+      // Set an upper bound on how many rows we're willing to push before a flush must occur:
+      // with the memory threshold set at 100KB and every key distinct,
+      // the count should not go beyond 100*1024/16 (key + data per entry).
+      assertTrue(countRowsProduced < 100*1024/16);
+    }
+    
+    assertTrue(0 < outputRowCount);
+  }
 
   @Test
   public void testMultiKeyIntStringInt() throws HiveException {
@@ -1485,7 +1570,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -1595,7 +1680,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -1885,7 +1970,7 @@ public class TestVectorGroupByOperator {
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -1916,7 +2001,7 @@ public class TestVectorGroupByOperator {
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -1947,7 +2032,7 @@ public class TestVectorGroupByOperator {
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -1977,7 +2062,7 @@ public class TestVectorGroupByOperator {
     vgo.initialize(null, null);
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -2046,7 +2131,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
 
@@ -2113,7 +2198,7 @@ public class TestVectorGroupByOperator {
     }.init(aggregateName, expected, keys));
 
     for (VectorizedRowBatch unit: data) {
-      vgo.process(unit,  0);
+      vgo.processOp(unit,  0);
     }
     vgo.close(false);
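
A note on the new testMemoryPressureFlush test above: the flush threshold is expressed as a fraction of the maximum heap, so "100KB before flushing" translates to 100*1024/maxHeap. A minimal standalone sketch of that arithmetic (plain JDK, using the same MemoryMXBean calls as the test; the 16-bytes-per-entry figure is the test's own assumption):

    import java.lang.management.ManagementFactory;

    public class ThresholdSketch {
      public static void main(String[] args) {
        long maxHeap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage().getMax();
        // Fraction of the heap corresponding to roughly 100KB:
        float threshold = 100.0f * 1024.0f / maxHeap;
        System.out.println("flush threshold fraction = " + threshold);
        // With distinct keys at ~16 bytes per entry (key + data), a flush
        // should happen before about 100*1024/16 = 6400 rows are buffered.
        System.out.println("row upper bound = " + (100 * 1024 / 16));
      }
    }

The recurring vgo.process -> vgo.processOp changes in this file's hunks above are mechanical: the tests now invoke the operator's processOp method directly instead of process.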
 

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorSelectOperator.java Tue Nov 26 08:19:25 2013
@@ -32,9 +32,8 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
-import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -95,7 +94,8 @@ public class TestVectorSelectOperator {
     ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Long.class, "b", "table", false);
     ExprNodeColumnDesc colDesc3 = new ExprNodeColumnDesc(Long.class, "c", "table", false);
     ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
+    GenericUDF gudf = new GenericUDFOPPlus();
+
     plusDesc.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new  ArrayList<ExprNodeDesc>();
     children.add(colDesc1);
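
The change above swaps the bridged UDF (GenericUDFBridge wrapping UDFOPPlus) for the native GenericUDFOPPlus. A short fragment showing the new construction, using only classes and calls visible in this diff (column names are illustrative; imports as in the file's import block):

    ExprNodeGenericFuncDesc plusDesc = new ExprNodeGenericFuncDesc();
    // Previously: new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName())
    plusDesc.setGenericUDF(new GenericUDFOPPlus());
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(new ExprNodeColumnDesc(Long.class, "a", "table", false));
    children.add(new ExprNodeColumnDesc(Long.class, "b", "table", false));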

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Tue Nov 26 08:19:25 2013
@@ -85,11 +85,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.udf.UDFLTrim;
 import org.apache.hadoop.hive.ql.udf.UDFLog;
-import org.apache.hadoop.hive.ql.udf.UDFOPMinus;
-import org.apache.hadoop.hive.ql.udf.UDFOPMod;
-import org.apache.hadoop.hive.ql.udf.UDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
-import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.UDFPower;
 import org.apache.hadoop.hive.ql.udf.UDFSin;
 import org.apache.hadoop.hive.ql.udf.UDFYear;
@@ -102,11 +98,15 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMod;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFRound;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPPlus;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -159,11 +159,11 @@ public class TestVectorizationContext {
      * Create original expression tree for following
      * (plus (minus (plus col1 col2) col3) (multiply col4 (mod col5 col6)) )
      */
-    GenericUDFBridge udf1 = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
-    GenericUDFBridge udf2 = new GenericUDFBridge("-", true, UDFOPMinus.class.getCanonicalName());
-    GenericUDFBridge udf3 = new GenericUDFBridge("*", true, UDFOPMultiply.class.getCanonicalName());
-    GenericUDFBridge udf4 = new GenericUDFBridge("+", true, UDFOPPlus.class.getCanonicalName());
-    GenericUDFBridge udf5 = new GenericUDFBridge("%", true, UDFOPMod.class.getCanonicalName());
+    GenericUDFOPPlus udf1 = new GenericUDFOPPlus();
+    GenericUDFOPMinus udf2 = new GenericUDFOPMinus();
+    GenericUDFOPMultiply udf3 = new GenericUDFOPMultiply();
+    GenericUDFOPPlus udf4 = new GenericUDFOPPlus();
+    GenericUDFOPMod udf5 = new GenericUDFOPMod();
 
     ExprNodeGenericFuncDesc sumExpr = new ExprNodeGenericFuncDesc();
     sumExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
@@ -224,7 +224,6 @@ public class TestVectorizationContext {
 
     //Generate vectorized expression
     VectorizationContext vc = new VectorizationContext(columnMap, 6);
-
     VectorExpression ve = vc.getVectorExpression(sumExpr, VectorExpressionDescriptor.Mode.PROJECTION);
 
     //Verify vectorized expression
@@ -232,6 +231,7 @@ public class TestVectorizationContext {
     assertEquals(2, ve.getChildExpressions().length);
     VectorExpression childExpr1 = ve.getChildExpressions()[0];
     VectorExpression childExpr2 = ve.getChildExpressions()[1];
+    System.out.println(ve.toString());
     assertEquals(6, ve.getOutputColumn());
 
     assertTrue(childExpr1 instanceof LongColSubtractLongColumn);
@@ -300,7 +300,7 @@ public class TestVectorizationContext {
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Float.class, "col1", "table", false);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
 
-    GenericUDFBridge udf = new GenericUDFBridge("+", false, UDFOPPlus.class.getCanonicalName());
+    GenericUDFOPPlus udf = new GenericUDFOPPlus();
     ExprNodeGenericFuncDesc exprDesc = new ExprNodeGenericFuncDesc();
     exprDesc.setGenericUDF(udf);
 
@@ -571,7 +571,7 @@ public class TestVectorizationContext {
   @Test
   public void testVectorizeScalarColumnExpression() throws HiveException {
     ExprNodeGenericFuncDesc scalarMinusConstant = new ExprNodeGenericFuncDesc();
-    GenericUDF gudf = new GenericUDFBridge("-", true, UDFOPMinus.class.getCanonicalName());
+    GenericUDFOPMinus gudf = new GenericUDFOPMinus();
     scalarMinusConstant.setGenericUDF(gudf);
     List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, 20);
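
As the tree comment above notes, these tests assemble descriptor trees such as (plus (minus (plus col1 col2) col3) (multiply col4 (mod col5 col6))) and then vectorize them in a single call. A fragment of that step, with names taken from this diff (columnMap is the test's column-name-to-index map):

    VectorizationContext vc = new VectorizationContext(columnMap, 6);
    VectorExpression ve = vc.getVectorExpression(sumExpr, VectorExpressionDescriptor.Mode.PROJECTION);
    // The result is a tree of concrete vectorized classes; for example, the test
    // asserts one child is a LongColSubtractLongColumn and that the root writes
    // to output column 6.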

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java Tue Nov 26 08:19:25 2013
@@ -20,10 +20,12 @@ package org.apache.hadoop.hive.ql.exec.v
 
 
 import java.sql.Timestamp;
+import java.util.ArrayList;
 import java.util.Random;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -34,11 +36,18 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
@@ -73,6 +82,11 @@ public class TestVectorExpressionWriters
   private Writable getWritableValue(TypeInfo ti, byte[] value) {
     if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
       return new Text(value);
+    } else if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
+      return new HiveVarcharWritable(
+          new HiveVarchar(new Text(value).toString(), -1));
+    } else if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
+      return new BytesWritable(value);
     }
     return null;
   }
@@ -112,6 +126,25 @@ public class TestVectorExpressionWriters
       }
     }
   }
+  
+  private void testSetterDouble(TypeInfo type) throws HiveException {
+    DoubleColumnVector dcv = VectorizedRowGroupGenUtil.generateDoubleColumnVector(true, false,
+        this.vectorSize, new Random(10));
+    dcv.isNull[2] = true;
+    Object[] values = new Object[this.vectorSize];
+    
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = vew.initValue(null);
+      values[i] = vew.setValue(values[i], dcv, i);
+      if (values[i] != null) {
+        Writable expected = getWritableValue(type, dcv.vector[i]);
+        Assert.assertEquals(expected, values[i]);
+      } else {
+        Assert.assertTrue(dcv.isNull[i]);
+      }
+    }
+  }  
 
   private void testWriterLong(TypeInfo type) throws HiveException {
     LongColumnVector lcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
@@ -135,8 +168,89 @@ public class TestVectorExpressionWriters
       }
     }
   }
+  
+  private void testSetterLong(TypeInfo type) throws HiveException {
+    LongColumnVector lcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
+        vectorSize, new Random(10));
+    lcv.isNull[3] = true;
 
-  private void testWriterBytes(TypeInfo type) throws HiveException {
+    Object[] values = new Object[this.vectorSize];
+    
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = vew.initValue(null);
+      values[i] = vew.setValue(values[i], lcv, i);
+      if (values[i] != null) {
+        Writable expected = getWritableValue(type, lcv.vector[i]);
+        if (expected instanceof TimestampWritable) {
+          TimestampWritable t1 = (TimestampWritable) expected;
+          TimestampWritable t2 = (TimestampWritable) values[i];
+          Assert.assertTrue(t1.getNanos() == t2.getNanos());
+          Assert.assertTrue(t1.getSeconds() == t2.getSeconds());
+          continue;
+        }
+        Assert.assertEquals(expected, values[i]);
+      } else {
+        Assert.assertTrue(lcv.isNull[i]);
+      }
+    }
+  }
+  
+  private StructObjectInspector genStructOI() {
+    ArrayList<String> fieldNames1 = new ArrayList<String>();
+    fieldNames1.add("theInt");
+    fieldNames1.add("theBool");
+    ArrayList<ObjectInspector> fieldObjectInspectors1 = new ArrayList<ObjectInspector>();
+    fieldObjectInspectors1
+        .add(PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+    fieldObjectInspectors1
+        .add(PrimitiveObjectInspectorFactory.writableBooleanObjectInspector);
+    return ObjectInspectorFactory
+        .getStandardStructObjectInspector(fieldNames1, fieldObjectInspectors1);
+  }
+  
+  private void testStructLong(TypeInfo type) throws HiveException {
+    LongColumnVector icv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
+        vectorSize, new Random(10));
+    icv.isNull[3] = true;
+
+    LongColumnVector bcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
+        vectorSize, new Random(10));
+    bcv.isNull[2] = true;
+    
+    ArrayList<Object>[] values = (ArrayList<Object>[]) new ArrayList[this.vectorSize];
+    
+    StructObjectInspector soi = genStructOI();
+    
+    VectorExpressionWriter[] vew = VectorExpressionWriterFactory.getExpressionWriters(soi);
+    
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = new ArrayList<Object>(2);
+      values[i].add(null);
+      values[i].add(null);
+
+      vew[0].setValue(values[i], icv, i);
+      vew[1].setValue(values[i], bcv, i);
+      
+      Object theInt = values[i].get(0);
+      if (theInt == null) {
+        Assert.assertTrue(icv.isNull[i]);
+      } else {
+        IntWritable w = (IntWritable) theInt;
+        Assert.assertEquals((int) icv.vector[i], w.get());
+      }
+
+      Object theBool = values[i].get(1);
+      if (theBool == null) {
+        Assert.assertTrue(bcv.isNull[i]);
+      } else {
+        BooleanWritable w = (BooleanWritable) theBool;
+        Assert.assertEquals(bcv.vector[i] != 0, w.get());
+      }
+    }
+  }
+
+  private void testWriterText(TypeInfo type) throws HiveException {
     Text t1 = new Text("alpha");
     Text t2 = new Text("beta");
     BytesColumnVector bcv = new BytesColumnVector(vectorSize);
@@ -160,36 +274,100 @@ public class TestVectorExpressionWriters
       }
     }
   }
-
+  
+  private void testSetterText(TypeInfo type) throws HiveException {
+    Text t1 = new Text("alpha");
+    Text t2 = new Text("beta");
+    BytesColumnVector bcv = new BytesColumnVector(vectorSize);
+    bcv.noNulls = false;
+    bcv.initBuffer();
+    bcv.setVal(0, t1.getBytes(), 0, t1.getLength());
+    bcv.isNull[1] = true;
+    bcv.setVal(2, t2.getBytes(), 0, t2.getLength());
+    bcv.isNull[3] = true;
+    bcv.setVal(4, t1.getBytes(), 0, t1.getLength());
+    
+    Object[] values = new Object[this.vectorSize];
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = vew.initValue(null);
+      Writable w = (Writable) vew.setValue(values[i], bcv, i);
+      if (w != null) {
+        byte [] val = new byte[bcv.length[i]];
+        System.arraycopy(bcv.vector[i], bcv.start[i], val, 0, bcv.length[i]);
+        Writable expected = getWritableValue(type, val);
+        Assert.assertEquals(expected, w);
+      } else {
+        Assert.assertTrue(bcv.isNull[i]);
+      }
+    }
+  }
+  
   @Test
   public void testVectorExpressionWriterDouble() throws HiveException {
     testWriterDouble(TypeInfoFactory.doubleTypeInfo);
   }
 
   @Test
+  public void testVectorExpressionSetterDouble() throws HiveException {
+    testSetterDouble(TypeInfoFactory.doubleTypeInfo);
+  }  
+
+  @Test
   public void testVectorExpressionWriterFloat() throws HiveException {
     testWriterDouble(TypeInfoFactory.floatTypeInfo);
   }
 
   @Test
+  public void testVectorExpressionSetterFloat() throws HiveException {
+    testSetterDouble(TypeInfoFactory.floatTypeInfo);
+  }
+  
+  @Test
   public void testVectorExpressionWriterLong() throws HiveException {
     testWriterLong(TypeInfoFactory.longTypeInfo);
   }
-
+  
+  @Test
+  public void testVectorExpressionSetterLong() throws HiveException {
+    testSetterLong(TypeInfoFactory.longTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionStructLong() throws HiveException {
+    testStructLong(TypeInfoFactory.longTypeInfo);
+  }
+  
   @Test
   public void testVectorExpressionWriterInt() throws HiveException {
     testWriterLong(TypeInfoFactory.intTypeInfo);
   }
 
   @Test
+  public void testVectorExpressionSetterInt() throws HiveException {
+    testSetterLong(TypeInfoFactory.intTypeInfo);
+  }
+
+  @Test
   public void testVectorExpressionWriterShort() throws HiveException {
     testWriterLong(TypeInfoFactory.shortTypeInfo);
   }
 
   @Test
+  public void testVectorExpressionSetterShort() throws HiveException {
+    testSetterLong(TypeInfoFactory.shortTypeInfo);
+  }
+
+  
+  @Test
   public void testVectorExpressionWriterBoolean() throws HiveException {
     testWriterLong(TypeInfoFactory.booleanTypeInfo);
   }
+  
+  @Test
+  public void testVectorExpressionSetterBoolean() throws HiveException {
+    testSetterLong(TypeInfoFactory.booleanTypeInfo);
+  }
 
   @Test
   public void testVectorExpressionWriterTimestamp() throws HiveException {
@@ -197,12 +375,47 @@ public class TestVectorExpressionWriters
   }
 
   @Test
-  public void testVectorExpressionWriterBye() throws HiveException {
+  public void testVectorExpressionSetterTimestamp() throws HiveException {
+    testSetterLong(TypeInfoFactory.timestampTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionWriterByte() throws HiveException {
     testWriterLong(TypeInfoFactory.byteTypeInfo);
   }
+  
+  @Test
+  public void testVectorExpressionSetterByte() throws HiveException {
+    testSetterLong(TypeInfoFactory.byteTypeInfo);
+  }
 
   @Test
-  public void testVectorExpressionWriterBytes() throws HiveException {
-    testWriterBytes(TypeInfoFactory.stringTypeInfo);
+  public void testVectorExpressionWriterString() throws HiveException {
+    testWriterText(TypeInfoFactory.stringTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionSetterString() throws HiveException {
+    testSetterText(TypeInfoFactory.stringTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionWriterVarchar() throws HiveException {
+    testWriterText(TypeInfoFactory.varcharTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionSetterVarchar() throws HiveException {
+    testSetterText(TypeInfoFactory.varcharTypeInfo);
+  }    
+
+  @Test
+  public void testVectorExpressionWriterBinary() throws HiveException {
+    testWriterText(TypeInfoFactory.binaryTypeInfo);
+  }
+  
+  @Test
+  public void testVectorExpressionSetterBinary() throws HiveException {
+    testSetterText(TypeInfoFactory.binaryTypeInfo);
   }
 }
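
The new "setter" tests above exercise a two-step write path: initValue(null) allocates a writable, and setValue(obj, columnVector, i) fills it from row i, returning null for null rows. For structs, VectorExpressionWriterFactory.getExpressionWriters(structOI) yields one writer per field. A fragment of the pattern, with names from the diff:

    VectorExpressionWriter vew = getWriter(type);  // test helper defined in this class
    for (int i = 0; i < vectorSize; i++) {
      Object value = vew.initValue(null);          // allocate a fresh writable
      value = vew.setValue(value, lcv, i);         // fill it from row i of the vector
      if (value == null) {
        Assert.assertTrue(lcv.isNull[i]);          // a null result marks a null row
      }
    }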

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/TestRCFile.java Tue Nov 26 08:19:25 2013
@@ -135,9 +135,11 @@ public class TestRCFile {
       patialS.set(2, new BytesRefWritable("789".getBytes("UTF-8")));
       patialS.set(3, new BytesRefWritable("1000".getBytes("UTF-8")));
       patialS.set(4, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      patialS.set(5, new BytesRefWritable("NULL".getBytes("UTF-8")));
+      // LazyString has no NULL sequence; a missing value is read back as an empty string.
+      patialS.set(5, new BytesRefWritable("".getBytes("UTF-8")));
       patialS.set(6, new BytesRefWritable("NULL".getBytes("UTF-8")));
-      patialS.set(7, new BytesRefWritable("NULL".getBytes("UTF-8")));
+      // LazyString has no NULL sequence; a missing value is read back as an empty string.
+      patialS.set(7, new BytesRefWritable("".getBytes("UTF-8")));
 
     } catch (UnsupportedEncodingException e) {
       throw new RuntimeException(e);

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Tue Nov 26 08:19:25 2013
@@ -268,7 +268,7 @@ public class TestOrcFile {
         + "binary,string1:string,middle:struct<list:array<struct<int1:int,"
         + "string1:string>>>,list:array<struct<int1:int,string1:string>>,"
         + "map:map<string,struct<int1:int,string1:string>>,ts:timestamp,"
-        + "decimal1:decimal(65,30)>", readerInspector.getTypeName());
+        + "decimal1:decimal(38,18)>", readerInspector.getTypeName());
     List<? extends StructField> fields = readerInspector
         .getAllStructFieldRefs();
     BooleanObjectInspector bo = (BooleanObjectInspector) readerInspector
@@ -1030,7 +1030,7 @@ public class TestOrcFile {
     synchronized (TestOrcFile.class) {
       inspector = OrcStruct.createObjectInspector(0, types);
     }
-    HiveDecimal maxValue = HiveDecimal.create("100000000000000000000");
+    HiveDecimal maxValue = HiveDecimal.create("10000000000000000000");
     Writer writer = OrcFile.createWriter(testFilePath,
                                          OrcFile.writerOptions(conf)
                                          .inspector(inspector)
@@ -1062,7 +1062,7 @@ public class TestOrcFile {
     writer.addRow(row);
     union.set((byte) 0, new IntWritable(200000));
     row.setFieldValue(0, Timestamp.valueOf("1900-01-01 00:00:00"));
-    value = HiveDecimal.create("100000000000000000000");
+    value = HiveDecimal.create("10000000000000000000");
     row.setFieldValue(2, value);
     writer.addRow(row);
     Random rand = new Random(42);
@@ -1073,8 +1073,8 @@ public class TestOrcFile {
       } else {
         union.set((byte) 1, new Text(new Integer(i*i).toString()));
       }
-      value = HiveDecimal.create(new BigInteger(104, rand),
-          rand.nextInt(28));
+      value = HiveDecimal.create(new BigInteger(64, rand),
+          rand.nextInt(18));
       row.setFieldValue(2, value);
       if (maxValue.compareTo(value) < 0) {
         maxValue = value;
@@ -1128,7 +1128,7 @@ public class TestOrcFile {
     row = (OrcStruct) rows.next(null);
     assertEquals(1, rows.getRowNumber());
     inspector = reader.getObjectInspector();
-    assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(65,30)>",
+    assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(38,18)>",
         inspector.getTypeName());
     assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),
         row.getFieldValue(0));
@@ -1162,7 +1162,7 @@ public class TestOrcFile {
     assertEquals(Timestamp.valueOf("1900-01-01 00:00:00"),
         row.getFieldValue(0));
     assertEquals(new IntWritable(200000), union.getObject());
-    assertEquals(HiveDecimal.create("100000000000000000000"),
+    assertEquals(HiveDecimal.create("10000000000000000000"),
                  row.getFieldValue(2));
     rand = new Random(42);
     for(int i=1900; i < 2200; ++i) {
@@ -1176,8 +1176,8 @@ public class TestOrcFile {
         assertEquals(1, union.getTag());
         assertEquals(new Text(new Integer(i*i).toString()), union.getObject());
       }
-      assertEquals(HiveDecimal.create(new BigInteger(104, rand),
-                                   rand.nextInt(28)), row.getFieldValue(2));
+      assertEquals(HiveDecimal.create(new BigInteger(64, rand),
+                                   rand.nextInt(18)), row.getFieldValue(2));
     }
     for(int i=0; i < 5000; ++i) {
       row = (OrcStruct) rows.next(row);
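
The constant and generator changes above all follow from the new decimal default of (38,18): with 18 digits reserved for the fractional part, only 38 - 18 = 20 digits remain for the integer part, so the maximum constant drops from 10^20 (21 digits) to 10^19 (20 digits), and the random unscaled values shrink from 104 bits (up to 32 digits, with scale up to 27) to 64 bits (at most 20 digits, with scale up to 17). A standalone check of those digit counts:

    import java.math.BigInteger;

    public class DecimalWidthSketch {
      public static void main(String[] args) {
        System.out.println(new BigInteger("10000000000000000000").toString().length());  // 20: fits decimal(38,18)
        System.out.println(new BigInteger("100000000000000000000").toString().length()); // 21: one digit too wide
        System.out.println(BigInteger.ONE.shiftLeft(64).toString().length());            // 20: digit count of a 64-bit value
        System.out.println(BigInteger.ONE.shiftLeft(104).toString().length());           // 32: fit (65,30) but not a scale cap of 18
      }
    }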

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/zookeeper/TestZookeeperLockManager.java Tue Nov 26 08:19:25 2013
@@ -27,6 +27,7 @@ import org.apache.hadoop.hive.ql.lockmgr
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -81,4 +82,19 @@ public class TestZookeeperLockManager {
     verify(zooKeeper).getChildren(PARENT_LOCK_PATH, false);
     verifyNoMoreInteractions(zooKeeper);
   }
+
+  @Test
+  public void testGetQuorumServers() {
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "node1");
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, "9999");
+    Assert.assertEquals("node1:9999", ZooKeeperHiveLockManager.getQuorumServers(conf));
+
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "node1,node2,node3");
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, "9999");
+    Assert.assertEquals("node1:9999,node2:9999,node3:9999", ZooKeeperHiveLockManager.getQuorumServers(conf));
+
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "node1:5666,node2,node3");
+    conf.setVar(HiveConf.ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT, "9999");
+    Assert.assertEquals("node1:5666,node2:9999,node3:9999", ZooKeeperHiveLockManager.getQuorumServers(conf));
+  }
 }
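
The new testGetQuorumServers pins down how the quorum string is assembled: every node inherits the configured client port unless it already carries an explicit one. A minimal standalone sketch of that behavior (hypothetical helper; the real logic lives in ZooKeeperHiveLockManager.getQuorumServers):

    public class QuorumSketch {
      static String quorum(String hosts, String defaultPort) {
        StringBuilder sb = new StringBuilder();
        for (String node : hosts.split(",")) {
          if (sb.length() > 0) {
            sb.append(',');
          }
          // Keep an explicit host:port as-is; otherwise append the default port.
          sb.append(node.indexOf(':') >= 0 ? node : node + ":" + defaultPort);
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        System.out.println(quorum("node1", "9999"));                  // node1:9999
        System.out.println(quorum("node1,node2,node3", "9999"));      // node1:9999,node2:9999,node3:9999
        System.out.println(quorum("node1:5666,node2,node3", "9999")); // node1:5666,node2:9999,node3:9999
      }
    }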

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/parse/TestHiveDecimalParse.java Tue Nov 26 08:19:25 2013
@@ -61,7 +61,7 @@ public class TestHiveDecimalParse {
     int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,65]"));
+        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
   }
 
   @Test
@@ -72,7 +72,7 @@ public class TestHiveDecimalParse {
     int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,65]"));
+        driver.getErrorMsg().contains("Decimal precision out of allowed range [1,38]"));
   }
 
   @Test
@@ -83,7 +83,7 @@ public class TestHiveDecimalParse {
     int rc = driver.compile(query);
     Assert.assertTrue("Got " + rc + ", expected not zero", rc != 0);
     Assert.assertTrue(driver.getErrorMsg(),
-        driver.getErrorMsg().contains("Decimal scale out of allowed range [0,30]"));
+        driver.getErrorMsg().contains("Decimal scale must be less than or equal to precision"));
   }
 
   @Test
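
The updated messages encode two rules: precision must lie in [1,38], and scale may not exceed precision (the old fixed scale cap of 30 is gone). A standalone sketch of those checks (illustrative only, not Hive's actual validator):

    public class DecimalRuleSketch {
      static void validate(int precision, int scale) {
        if (precision < 1 || precision > 38) {
          throw new IllegalArgumentException("Decimal precision out of allowed range [1,38]");
        }
        if (scale < 0 || scale > precision) {
          throw new IllegalArgumentException("Decimal scale must be less than or equal to precision");
        }
      }

      public static void main(String[] args) {
        validate(38, 18); // ok: Hive's new default decimal type
        try {
          validate(10, 12); // scale greater than precision
        } catch (IllegalArgumentException e) {
          System.out.println(e.getMessage());
        }
      }
    }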

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/testutil/OperatorTestUtils.java Tue Nov 26 08:19:25 2013
@@ -49,7 +49,7 @@ public class OperatorTestUtils {
       InspectableObject [] sourceData, InspectableObject [] expected) throws HiveException {
     InspectableObject resultRef = new InspectableObject();
     for (int i = 0; i < sourceData.length; i++) {
-      selectOp.process(sourceData[i].o, 0);
+      selectOp.processOp(sourceData[i].o, 0);
       collectOp.retrieve(resultRef);
       StructObjectInspector expectedOi = (StructObjectInspector) expected[i].oi;
       List<? extends StructField> expectedFields = expectedOi.getAllStructFieldRefs();

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_1.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_1.q Tue Nov 26 08:19:25 2013
@@ -6,7 +6,7 @@
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.DummyStatsAggregator;
 set hive.test.dummystats.aggregator=connect;

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_2.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_2.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_2.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/stats_aggregator_error_2.q Tue Nov 26 08:19:25 2013
@@ -5,7 +5,7 @@
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator="";
 

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_1.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_1.q Tue Nov 26 08:19:25 2013
@@ -6,7 +6,7 @@
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.DummyStatsAggregator;
 set hive.test.dummystats.publisher=connect;

Modified: hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_2.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_2.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_2.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientnegative/stats_publisher_error_2.q Tue Nov 26 08:19:25 2013
@@ -5,7 +5,7 @@
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher="";
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.DummyStatsAggregator;
 

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/decimal_1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/decimal_1.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/decimal_1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/decimal_1.q Tue Nov 26 08:19:25 2013
@@ -1,12 +1,14 @@
 set hive.fetch.task.conversion=more;
 
-drop table decimal_1;
+drop table if exists decimal_1;
 
-create table decimal_1 (t decimal(4,2));
+create table decimal_1 (t decimal(4,2), u decimal(5), v decimal);
 alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
 
+desc decimal_1;
+
 insert overwrite table decimal_1
-  select cast('17.29' as decimal(4,2)) from src tablesample (1 rows);
+  select cast('17.29' as decimal(4,2)), 3.1415926BD, 3115926.54321BD from src tablesample (1 rows);
 select cast(t as boolean) from decimal_1;
 select cast(t as tinyint) from decimal_1;
 select cast(t as smallint) from decimal_1;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/decimal_3.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/decimal_3.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/decimal_3.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/decimal_3.q Tue Nov 26 08:19:25 2013
@@ -1,6 +1,6 @@
 DROP TABLE IF EXISTS DECIMAL_3;
 
-CREATE TABLE DECIMAL_3(key decimal(65,30), value int) 
+CREATE TABLE DECIMAL_3(key decimal(38,18), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/decimal_precision.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/decimal_precision.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/decimal_precision.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/decimal_precision.q Tue Nov 26 08:19:25 2013
@@ -1,6 +1,6 @@
 DROP TABLE IF EXISTS DECIMAL_PRECISION;
 
-CREATE TABLE DECIMAL_PRECISION(dec decimal(60,30)) 
+CREATE TABLE DECIMAL_PRECISION(dec decimal(38,18))
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;
@@ -17,11 +17,11 @@ SELECT dec, dec * dec FROM DECIMAL_PRECI
 
 SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION;
 
-SELECT dec * cast('123456789012345678901234567890.123456789' as decimal(39,9)) FROM DECIMAL_PRECISION LIMIT 1;
-SELECT * from DECIMAL_PRECISION WHERE dec > cast('123456789012345678901234567890.123456789' as decimal(39,9)) LIMIT 1;
-SELECT dec * 123456789012345678901234567890.123456789 FROM DECIMAL_PRECISION LIMIT 1;
+SELECT dec * cast('12345678901234567890.12345678' as decimal(38,18)) FROM DECIMAL_PRECISION LIMIT 1;
+SELECT * from DECIMAL_PRECISION WHERE dec > cast('1234567890123456789012345678.12345678' as decimal(38,18)) LIMIT 1;
+SELECT dec * 12345678901234567890.12345678 FROM DECIMAL_PRECISION LIMIT 1;
 
-SELECT MIN(cast('123456789012345678901234567890.123456789' as decimal(39,9))) FROM DECIMAL_PRECISION;
-SELECT COUNT(cast('123456789012345678901234567890.123456789' as decimal(39,9))) FROM DECIMAL_PRECISION;
+SELECT MIN(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION;
+SELECT COUNT(cast('12345678901234567890.12345678' as decimal(38,18))) FROM DECIMAL_PRECISION;
 
 DROP TABLE DECIMAL_PRECISION;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/decimal_udf.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/decimal_udf.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/decimal_udf.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/decimal_udf.q Tue Nov 26 08:19:25 2013
@@ -2,7 +2,7 @@ set hive.fetch.task.conversion=more;
 
 DROP TABLE IF EXISTS DECIMAL_UDF;
 
-CREATE TABLE DECIMAL_UDF (key decimal(65,30), value int) 
+CREATE TABLE DECIMAL_UDF (key decimal(38,18), value int)
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/insert_into3.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/insert_into3.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/insert_into3.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/insert_into3.q Tue Nov 26 08:19:25 2013
@@ -4,10 +4,10 @@ DROP TABLE insert_into3b;
 CREATE TABLE insert_into3a (key int, value string);
 CREATE TABLE insert_into3b (key int, value string);
 
-EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * LIMIT 50
-                 INSERT INTO TABLE insert_into3b SELECT * LIMIT 100;
-FROM src INSERT INTO TABLE insert_into3a SELECT * LIMIT 50
-         INSERT INTO TABLE insert_into3b SELECT * LIMIT 100;
+EXPLAIN FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+                 INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100;
+FROM src INSERT INTO TABLE insert_into3a SELECT * ORDER BY key, value LIMIT 50
+         INSERT INTO TABLE insert_into3b SELECT * ORDER BY key, value LIMIT 100;
 SELECT SUM(HASH(c)) FROM (
     SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into3a
 ) t;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/serde_regex.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/serde_regex.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/serde_regex.q Tue Nov 26 08:19:25 2013
@@ -42,7 +42,7 @@ DROP TABLE serde_regex;
 
 EXPLAIN
 CREATE TABLE serde_regex1(
-  key decimal(65,30),
+  key decimal(38,18),
   value int)
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.RegexSerDe'
 WITH SERDEPROPERTIES (
@@ -51,7 +51,7 @@ WITH SERDEPROPERTIES (
 STORED AS TEXTFILE;
 
 CREATE TABLE serde_regex1(
-  key decimal(65,30),
+  key decimal(38,18),
   value int)
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.RegexSerDe'
 WITH SERDEPROPERTIES (

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/stats19.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/stats19.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/stats19.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/stats19.q Tue Nov 26 08:19:25 2013
@@ -1,7 +1,7 @@
 set datanucleus.cache.collections=false;
 set hive.stats.autogather=true;
 set hive.stats.reliable=true;
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.KeyVerifyingStatsAggregator;
 
@@ -56,7 +56,7 @@ insert overwrite table stats_part partit
 desc formatted stats_part partition (ds='2010-04-08', hr = '13');
 
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.KeyVerifyingStatsAggregator;
 set hive.stats.key.prefix.max.length=0;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/stats_aggregator_error_1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/stats_aggregator_error_1.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/stats_aggregator_error_1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/stats_aggregator_error_1.q Tue Nov 26 08:19:25 2013
@@ -1,12 +1,12 @@
 -- In this test, there is a dummy stats aggregator which throws an error when various
--- methods are called (as indicated by the parameter hive.test.dummystats.agregator)
+-- methods are called (as indicated by the parameter hive.test.dummystats.aggregator)
 -- Since stats need not be reliable (by setting hive.stats.reliable to false), the 
 -- insert statements succeed. The insert statement succeeds even if the stats aggregator
 -- is set to null, since stats need not be reliable.
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.DummyStatsAggregator;
 set hive.stats.reliable=false;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/stats_publisher_error_1.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/stats_publisher_error_1.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/stats_publisher_error_1.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/stats_publisher_error_1.q Tue Nov 26 08:19:25 2013
@@ -6,7 +6,7 @@
 
 create table tmptable(key string, value string);
 
-set hive.stats.dbclass=dummy;
+set hive.stats.dbclass=custom;
 set hive.stats.default.publisher=org.apache.hadoop.hive.ql.stats.DummyStatsPublisher;
 set hive.stats.default.aggregator=org.apache.hadoop.hive.ql.stats.DummyStatsAggregator;
 set hive.stats.reliable=false;

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/udf_case.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/udf_case.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/udf_case.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/udf_case.q Tue Nov 26 08:19:25 2013
@@ -63,3 +63,20 @@ FROM src tablesample (1 rows);
 SELECT CASE 1 WHEN 1 THEN 'yo'
 ELSE reflect('java.lang.String', 'bogus', 1) END
 FROM src tablesample (1 rows);
+
+-- Allow compatible types in when/return type
+SELECT CASE 1
+        WHEN 1 THEN 123.0BD
+        ELSE 0.0BD
+       END,
+       CASE 1
+        WHEN 1.0 THEN 123
+        WHEN 2 THEN 1.0
+        ELSE 222.02BD
+       END,
+       CASE 'abc'
+        WHEN cast('abc' as varchar(3)) THEN 'abcd'
+        WHEN 'efg' THEN cast('efgh' as varchar(10))
+        ELSE cast('ijkl' as char(4))
+       END
+FROM src tablesample (1 rows);

Modified: hive/branches/tez/ql/src/test/queries/clientpositive/udf_when.q
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/queries/clientpositive/udf_when.q?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/queries/clientpositive/udf_when.q (original)
+++ hive/branches/tez/ql/src/test/queries/clientpositive/udf_when.q Tue Nov 26 08:19:25 2013
@@ -57,3 +57,20 @@ SELECT CASE
         WHEN 28=28 THEN NULL
        END
 FROM src tablesample (1 rows);
+
+-- Allow compatible types to be used in return value
+SELECT CASE
+        WHEN 1=1 THEN 123.0BD
+        ELSE 0.0BD
+       END,
+       CASE
+        WHEN 1=1 THEN 123
+        WHEN 1=2 THEN 1.0
+        ELSE 222.02BD
+       END,
+       CASE
+        WHEN 1=1 THEN 'abcd'
+        WHEN 1=2 THEN cast('efgh' as varchar(10))
+        ELSE cast('ijkl' as char(4))
+       END
+FROM src tablesample (1 rows);

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_arithmetic_type.q.out Tue Nov 26 08:19:25 2013
@@ -1 +1 @@
-FAILED: SemanticException Line 0:-1 Wrong arguments ''2000-01-01 00:00:01'': No matching method for class org.apache.hadoop.hive.ql.udf.UDFOPMinus with (timestamp, timestamp)
+FAILED: SemanticException Line 0:-1 Wrong arguments ''2000-01-01 00:00:01'': No matching method for class org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPMinus with (timestamp, timestamp)

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(float)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientnegative/udf_assert_true2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/udf_assert_true2.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/udf_assert_true2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/udf_assert_true2.q.out Tue Nov 26 08:19:25 2013
@@ -23,7 +23,7 @@ STAGE PLANS:
                   Select Operator
                     expressions:
                           expr: (1 + assert_true((_col4 < 2)))
-                          type: int
+                          type: double
                     outputColumnNames: _col0
                     Limit
                       File Output Operator
@@ -45,7 +45,7 @@ STAGE PLANS:
                     Select Operator
                       expressions:
                             expr: (1 + assert_true((_col4 < 2)))
-                            type: int
+                            type: double
                       outputColumnNames: _col0
                       Limit
                         File Output Operator

Modified: hive/branches/tez/ql/src/test/results/clientnegative/wrong_column_type.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientnegative/wrong_column_type.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientnegative/wrong_column_type.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientnegative/wrong_column_type.q.out Tue Nov 26 08:19:25 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE dest1(a float)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
-FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array<double>). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array<double>). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(38,18))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/alter_partition_coltype.q.out Tue Nov 26 08:19:25 2013
@@ -112,16 +112,24 @@ STAGE PLANS:
         alter_coltype 
           TableScan
             alias: alter_coltype
+            Statistics:
+                numRows: 25 dataSize: 191 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
+              Statistics:
+                  numRows: 25 dataSize: 191 basicStatsState: COMPLETE colStatsState: COMPLETE
               Group By Operator
                 aggregations:
                       expr: count()
                 bucketGroup: false
                 mode: hash
                 outputColumnNames: _col0
+                Statistics:
+                    numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                 Reduce Output Operator
                   sort order: 
+                  Statistics:
+                      numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                   tag: -1
                   value expressions:
                         expr: _col0
@@ -181,16 +189,22 @@ STAGE PLANS:
           bucketGroup: false
           mode: mergepartial
           outputColumnNames: _col0
+          Statistics:
+              numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
           Select Operator
             expressions:
                   expr: _col0
                   type: bigint
             outputColumnNames: _col0
+            Statistics:
+                numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
             File Output Operator
               compressed: false
               GlobalTableId: 0
 #### A masked pattern was here ####
               NumFilesPerFileSink: 1
+              Statistics:
+                  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
 #### A masked pattern was here ####
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -308,16 +322,24 @@ STAGE PLANS:
         alter_coltype 
           TableScan
             alias: alter_coltype
+            Statistics:
+                numRows: 25 dataSize: 191 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
+              Statistics:
+                  numRows: 25 dataSize: 191 basicStatsState: COMPLETE colStatsState: COMPLETE
               Group By Operator
                 aggregations:
                       expr: count()
                 bucketGroup: false
                 mode: hash
                 outputColumnNames: _col0
+                Statistics:
+                    numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                 Reduce Output Operator
                   sort order: 
+                  Statistics:
+                      numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                   tag: -1
                   value expressions:
                         expr: _col0
@@ -377,16 +399,22 @@ STAGE PLANS:
           bucketGroup: false
           mode: mergepartial
           outputColumnNames: _col0
+          Statistics:
+              numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
           Select Operator
             expressions:
                   expr: _col0
                   type: bigint
             outputColumnNames: _col0
+            Statistics:
+                numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
             File Output Operator
               compressed: false
               GlobalTableId: 0
 #### A masked pattern was here ####
               NumFilesPerFileSink: 1
+              Statistics:
+                  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
 #### A masked pattern was here ####
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -457,21 +485,31 @@ STAGE PLANS:
         alter_coltype 
           TableScan
             alias: alter_coltype
+            Statistics:
+                numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Filter Operator
               isSamplingPred: false
               predicate:
                   expr: ((ts = 3.0) and (dt = 10))
                   type: boolean
+              Statistics:
+                  numRows: 75 dataSize: 0 basicStatsState: PARTIAL colStatsState: COMPLETE
               Select Operator
+                Statistics:
+                    numRows: 75 dataSize: 0 basicStatsState: PARTIAL colStatsState: COMPLETE
                 Group By Operator
                   aggregations:
                         expr: count()
                   bucketGroup: false
                   mode: hash
                   outputColumnNames: _col0
+                  Statistics:
+                      numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                   Reduce Output Operator
                     sort order: 
+                    Statistics:
+                        numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                     tag: -1
                     value expressions:
                           expr: _col0
@@ -617,16 +655,22 @@ STAGE PLANS:
           bucketGroup: false
           mode: mergepartial
           outputColumnNames: _col0
+          Statistics:
+              numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
           Select Operator
             expressions:
                   expr: _col0
                   type: bigint
             outputColumnNames: _col0
+            Statistics:
+                numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
             File Output Operator
               compressed: false
               GlobalTableId: 0
 #### A masked pattern was here ####
               NumFilesPerFileSink: 1
+              Statistics:
+                  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
 #### A masked pattern was here ####
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -769,6 +813,8 @@ STAGE PLANS:
         alter_coltype 
           TableScan
             alias: alter_coltype
+            Statistics:
+                numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Select Operator
               expressions:
@@ -781,11 +827,15 @@ STAGE PLANS:
                     expr: ts
                     type: string
               outputColumnNames: _col0, _col1, _col2, _col3
+              Statistics:
+                  numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: NONE
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
 #### A masked pattern was here ####
                 NumFilesPerFileSink: 1
+                Statistics:
+                    numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: NONE
 #### A masked pattern was here ####
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat
@@ -1008,16 +1058,24 @@ STAGE PLANS:
         alter_coltype 
           TableScan
             alias: alter_coltype
+            Statistics:
+                numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: COMPLETE
             GatherStats: false
             Select Operator
+              Statistics:
+                  numRows: 75 dataSize: 573 basicStatsState: COMPLETE colStatsState: COMPLETE
               Group By Operator
                 aggregations:
                       expr: count()
                 bucketGroup: false
                 mode: hash
                 outputColumnNames: _col0
+                Statistics:
+                    numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                 Reduce Output Operator
                   sort order: 
+                  Statistics:
+                      numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
                   tag: -1
                   value expressions:
                         expr: _col0
@@ -1163,16 +1221,22 @@ STAGE PLANS:
           bucketGroup: false
           mode: mergepartial
           outputColumnNames: _col0
+          Statistics:
+              numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
           Select Operator
             expressions:
                   expr: _col0
                   type: bigint
             outputColumnNames: _col0
+            Statistics:
+                numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
             File Output Operator
               compressed: false
               GlobalTableId: 0
 #### A masked pattern was here ####
               NumFilesPerFileSink: 1
+              Statistics:
+                  numRows: 1 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE
 #### A masked pattern was here ####
               table:
                   input format: org.apache.hadoop.mapred.TextInputFormat

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_join13.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_join13.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_join13.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_join13.q.out Tue Nov 26 08:19:25 2013
@@ -78,8 +78,8 @@ STAGE PLANS:
                     1 
                   handleSkewJoin: false
                   keys:
-                    0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()]
-                    1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()]
+                    0 [GenericUDFOPPlus(Column[_col0], Column[_col2])]
+                    1 [GenericUDFBridge(Column[_col0])]
                   Position of Big Table: 0
 
   Stage: Stage-3
@@ -119,8 +119,8 @@ STAGE PLANS:
                       1 
                     handleSkewJoin: false
                     keys:
-                      0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()]
-                      1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()]
+                      0 [GenericUDFOPPlus(Column[_col0], Column[_col2])]
+                      1 [GenericUDFBridge(Column[_col0])]
                     outputColumnNames: _col1, _col2
                     Position of Big Table: 0
                     Select Operator

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_join2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_join2.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_join2.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_join2.q.out Tue Nov 26 08:19:25 2013
@@ -52,8 +52,8 @@ STAGE PLANS:
                 1 {value}
               handleSkewJoin: false
               keys:
-                0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col4]()]
-                1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[key]()]
+                0 [GenericUDFOPPlus(Column[_col0], Column[_col4])]
+                1 [GenericUDFBridge(Column[key])]
               Position of Big Table: 0
 
   Stage: Stage-6
@@ -82,8 +82,8 @@ STAGE PLANS:
                   1 {value}
                 handleSkewJoin: false
                 keys:
-                  0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col4]()]
-                  1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[key]()]
+                  0 [GenericUDFOPPlus(Column[_col0], Column[_col4])]
+                  1 [GenericUDFBridge(Column[key])]
                 outputColumnNames: _col4, _col9
                 Position of Big Table: 0
                 Select Operator

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out Tue Nov 26 08:19:25 2013
@@ -111,6 +111,8 @@ STAGE PLANS:
         dim_pay_date 
           TableScan
             alias: dim_pay_date
+            Statistics:
+                numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Reduce Output Operator
               key expressions:
@@ -120,6 +122,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: date
                     type: string
+              Statistics:
+                  numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
               tag: 1
               value expressions:
                     expr: date
@@ -127,6 +131,8 @@ STAGE PLANS:
         orderpayment 
           TableScan
             alias: orderpayment
+            Statistics:
+                numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Reduce Output Operator
               key expressions:
@@ -136,6 +142,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: date
                     type: string
+              Statistics:
+                  numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
               tag: 0
               value expressions:
                     expr: dealid
@@ -201,6 +209,8 @@ STAGE PLANS:
             1 {VALUE._col1}
           handleSkewJoin: false
           outputColumnNames: _col0, _col3, _col4, _col8
+          Statistics:
+              numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -233,6 +243,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: _col0
                     type: int
+              Statistics:
+                  numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
               tag: 0
               value expressions:
                     expr: _col8
@@ -244,6 +256,8 @@ STAGE PLANS:
         deal 
           TableScan
             alias: deal
+            Statistics:
+                numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Reduce Output Operator
               key expressions:
@@ -253,6 +267,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: dealid
                     type: int
+              Statistics:
+                  numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
               tag: 1
               value expressions:
                     expr: dealid
@@ -335,6 +351,8 @@ STAGE PLANS:
             1 {VALUE._col0}
           handleSkewJoin: false
           outputColumnNames: _col1, _col10, _col11, _col14
+          Statistics:
+              numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -367,6 +385,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: _col10
                     type: int
+              Statistics:
+                  numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
               tag: 0
               value expressions:
                     expr: _col14
@@ -378,6 +398,8 @@ STAGE PLANS:
         order_city 
           TableScan
             alias: order_city
+            Statistics:
+                numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Reduce Output Operator
               key expressions:
@@ -387,6 +409,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: cityid
                     type: int
+              Statistics:
+                  numRows: 1 dataSize: 36 basicStatsState: COMPLETE colStatsState: NONE
               tag: 1
       Path -> Alias:
 #### A masked pattern was here ####
@@ -466,6 +490,8 @@ STAGE PLANS:
             1 
           handleSkewJoin: false
           outputColumnNames: _col1, _col7, _col18
+          Statistics:
+              numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
           File Output Operator
             compressed: false
             GlobalTableId: 0
@@ -498,6 +524,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: _col18
                     type: int
+              Statistics:
+                  numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
               tag: 0
               value expressions:
                     expr: _col7
@@ -507,6 +535,8 @@ STAGE PLANS:
         user 
           TableScan
             alias: user
+            Statistics:
+                numRows: 100 dataSize: 288 basicStatsState: COMPLETE colStatsState: NONE
             GatherStats: false
             Reduce Output Operator
               key expressions:
@@ -516,6 +546,8 @@ STAGE PLANS:
               Map-reduce partition columns:
                     expr: userid
                     type: int
+              Statistics:
+                  numRows: 100 dataSize: 288 basicStatsState: COMPLETE colStatsState: NONE
               tag: 1
       Path -> Alias:
 #### A masked pattern was here ####
@@ -595,6 +627,8 @@ STAGE PLANS:
             1 
           handleSkewJoin: false
           outputColumnNames: _col1, _col7
+          Statistics:
+              numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
           Select Operator
             expressions:
                   expr: _col1
@@ -602,12 +636,18 @@ STAGE PLANS:
                   expr: _col7
                   type: int
             outputColumnNames: _col0, _col1
+            Statistics:
+                numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
             Limit
+              Statistics:
+                  numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
               File Output Operator
                 compressed: false
                 GlobalTableId: 0
 #### A masked pattern was here ####
                 NumFilesPerFileSink: 1
+                Statistics:
+                    numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE
 #### A masked pattern was here ####
                 table:
                     input format: org.apache.hadoop.mapred.TextInputFormat

Modified: hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out?rev=1545564&r1=1545563&r2=1545564&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (original)
+++ hive/branches/tez/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out Tue Nov 26 08:19:25 2013
@@ -97,6 +97,8 @@ STAGE PLANS:
         b 
           TableScan
             alias: b
+            Statistics:
+                numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE
             GatherStats: false
             Sorted Merge Bucket Map Join Operator
               condition map:
@@ -290,6 +292,8 @@ STAGE PLANS:
         a 
           TableScan
             alias: a
+            Statistics:
+                numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE
             GatherStats: false
             Sorted Merge Bucket Map Join Operator
               condition map:
@@ -1070,6 +1074,8 @@ STAGE PLANS:
         a 
           TableScan
             alias: a
+            Statistics:
+                numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE
             GatherStats: false
             Sorted Merge Bucket Map Join Operator
               condition map: