You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by br...@apache.org on 2013/10/28 19:44:09 UTC

svn commit: r1536480 [8/12] - in /hive/branches/maven: ./ ant/src/org/apache/hadoop/hive/ant/ common/src/java/conf/ common/src/java/org/apache/hadoop/hive/common/type/ common/src/test/org/apache/hadoop/hive/common/type/ data/files/ hcatalog/core/src/ma...

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Mon Oct 28 18:44:01 2013
@@ -26,17 +26,23 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprAndExpr;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.FilterExprOrExpr;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import junit.framework.Assert;
+
+import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.DoubleColUnaryMinus;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterDoubleColLessDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColEqualLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColGreaterLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColLessDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColAddLongColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColEqualLongScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColGreaterLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColModuloLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColMultiplyLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.LongColSubtractLongColumn;
@@ -48,24 +54,75 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
+import org.apache.hadoop.hive.ql.udf.UDFLTrim;
+import org.apache.hadoop.hive.ql.udf.UDFLog;
 import org.apache.hadoop.hive.ql.udf.UDFOPMinus;
 import org.apache.hadoop.hive.ql.udf.UDFOPMod;
 import org.apache.hadoop.hive.ql.udf.UDFOPMultiply;
 import org.apache.hadoop.hive.ql.udf.UDFOPNegative;
 import org.apache.hadoop.hive.ql.udf.UDFOPPlus;
+import org.apache.hadoop.hive.ql.udf.UDFPower;
+import org.apache.hadoop.hive.ql.udf.UDFRound;
+import org.apache.hadoop.hive.ql.udf.UDFSin;
+import org.apache.hadoop.hive.ql.udf.UDFYear;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLower;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
 
 public class TestVectorizationContext {
 
   @Test
+  public void testVectorExpressionDescriptor() {
+    VectorUDFUnixTimeStampLong v1 = new VectorUDFUnixTimeStampLong();
+    VectorExpressionDescriptor.Builder builder1 = new VectorExpressionDescriptor.Builder();
+    VectorExpressionDescriptor.Descriptor d1 = builder1.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.LONG)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+    Assert.assertEquals(d1, v1.getDescriptor());
+
+    VectorExpressionDescriptor.Builder builder2 = new VectorExpressionDescriptor.Builder();
+    VectorExpressionDescriptor.Descriptor d2 = builder2.setMode(VectorExpressionDescriptor.Mode.FILTER)
+        .setNumArguments(2).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.LONG,
+            VectorExpressionDescriptor.ArgumentType.DOUBLE).setInputExpressionTypes(
+            VectorExpressionDescriptor.InputExpressionType.COLUMN,
+            VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
+    FilterLongColLessDoubleScalar v2 = new FilterLongColLessDoubleScalar();
+    Assert.assertEquals(d2, v2.getDescriptor());
+
+    VectorExpressionDescriptor.Builder builder3 = new VectorExpressionDescriptor.Builder();
+    VectorExpressionDescriptor.Descriptor d3 = builder3.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.STRING)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+    StringLower v3 = new StringLower();
+    Assert.assertEquals(d3, v3.getDescriptor());
+
+    VectorExpressionDescriptor.Builder builder4 = new VectorExpressionDescriptor.Builder();
+    VectorExpressionDescriptor.Descriptor d4 = builder4.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.ANY)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+    StringUpper v4 = new StringUpper();
+    Assert.assertEquals(d4, v4.getDescriptor());
+
+    VectorExpressionDescriptor.Builder builder5 = new VectorExpressionDescriptor.Builder();
+    VectorExpressionDescriptor.Descriptor d5 = builder5.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.STRING)
+        .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
+    IsNull v5 = new IsNull();
+    Assert.assertEquals(d5, v5.getDescriptor());
+  }
+
+  @Test
   public void testArithmeticExpressionVectorization() throws HiveException {
     /**
      * Create original expression tree for following
@@ -78,14 +135,19 @@ public class TestVectorizationContext {
     GenericUDFBridge udf5 = new GenericUDFBridge("%", true, UDFOPMod.class.getCanonicalName());
 
     ExprNodeGenericFuncDesc sumExpr = new ExprNodeGenericFuncDesc();
+    sumExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
     sumExpr.setGenericUDF(udf1);
     ExprNodeGenericFuncDesc minusExpr = new ExprNodeGenericFuncDesc();
+    minusExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
     minusExpr.setGenericUDF(udf2);
     ExprNodeGenericFuncDesc multiplyExpr = new ExprNodeGenericFuncDesc();
+    multiplyExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
     multiplyExpr.setGenericUDF(udf3);
     ExprNodeGenericFuncDesc sum2Expr = new ExprNodeGenericFuncDesc();
+    sum2Expr.setTypeInfo(TypeInfoFactory.intTypeInfo);
     sum2Expr.setGenericUDF(udf4);
     ExprNodeGenericFuncDesc modExpr = new ExprNodeGenericFuncDesc();
+    modExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
     modExpr.setGenericUDF(udf5);
 
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(Long.class, "col1", "table", false);
@@ -132,7 +194,7 @@ public class TestVectorizationContext {
     //Generate vectorized expression
     VectorizationContext vc = new VectorizationContext(columnMap, 6);
 
-    VectorExpression ve = vc.getVectorExpression(sumExpr);
+    VectorExpression ve = vc.getVectorExpression(sumExpr, VectorExpressionDescriptor.Mode.PROJECTION);
 
     //Verify vectorized expression
     assertTrue(ve instanceof LongColAddLongColumn);
@@ -172,9 +234,8 @@ public class TestVectorizationContext {
     columnMap.put("col2", 2);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
 
-    VectorExpression ve = vc.getVectorExpression(exprDesc);
+    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
     assertTrue(ve instanceof FilterStringColGreaterStringScalar);
   }
@@ -197,9 +258,8 @@ public class TestVectorizationContext {
     columnMap.put("col2", 2);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
 
-    VectorExpression ve = vc.getVectorExpression(exprDesc);
+    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
     assertTrue(ve instanceof FilterStringColGreaterStringColumn);
   }
@@ -222,20 +282,20 @@ public class TestVectorizationContext {
     columnMap.put("col1", 0);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.SELECT);
 
-    VectorExpression ve = vc.getVectorExpression(exprDesc);
+    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertTrue(ve.getOutputType().equalsIgnoreCase("double"));
   }
 
   @Test
-  public void testVectorizeAndOrExpression() throws HiveException {
+  public void testVectorizeFilterAndOrExpression() throws HiveException {
     ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
 
     GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
     ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
+    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
     greaterExprDesc.setGenericUDF(udf);
     List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
     children1.add(col1Expr);
@@ -247,6 +307,7 @@ public class TestVectorizationContext {
 
     GenericUDFOPLessThan udf2 = new GenericUDFOPLessThan();
     ExprNodeGenericFuncDesc lessExprDesc = new ExprNodeGenericFuncDesc();
+    lessExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
     lessExprDesc.setGenericUDF(udf2);
     List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>(2);
     children2.add(col2Expr);
@@ -255,6 +316,7 @@ public class TestVectorizationContext {
 
     GenericUDFOPAnd andUdf = new GenericUDFOPAnd();
     ExprNodeGenericFuncDesc andExprDesc = new ExprNodeGenericFuncDesc();
+    andExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
     andExprDesc.setGenericUDF(andUdf);
     List<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>(2);
     children3.add(greaterExprDesc);
@@ -266,9 +328,8 @@ public class TestVectorizationContext {
     columnMap.put("col2", 1);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
 
-    VectorExpression ve = vc.getVectorExpression(andExprDesc);
+    VectorExpression ve = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
     assertEquals(ve.getClass(), FilterExprAndExpr.class);
     assertEquals(ve.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
@@ -276,18 +337,204 @@ public class TestVectorizationContext {
 
     GenericUDFOPOr orUdf = new GenericUDFOPOr();
     ExprNodeGenericFuncDesc orExprDesc = new ExprNodeGenericFuncDesc();
+    orExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
     orExprDesc.setGenericUDF(orUdf);
     List<ExprNodeDesc> children4 = new ArrayList<ExprNodeDesc>(2);
     children4.add(greaterExprDesc);
     children4.add(lessExprDesc);
     orExprDesc.setChildren(children4);
+    VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertEquals(veOr.getClass(), FilterExprOrExpr.class);
+    assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
+    assertEquals(veOr.getChildExpressions()[1].getClass(), FilterDoubleColLessDoubleScalar.class);
+  }
+
+  @Test
+  public void testVectorizeAndOrProjectionExpression() throws HiveException {
+    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
+    ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
+
+    GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
+    ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
+    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    greaterExprDesc.setGenericUDF(udf);
+    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    greaterExprDesc.setChildren(children1);
+
+    ExprNodeColumnDesc col2Expr = new ExprNodeColumnDesc(Boolean.class, "col2", "table", false);
 
+    GenericUDFOPAnd andUdf = new GenericUDFOPAnd();
+    ExprNodeGenericFuncDesc andExprDesc = new ExprNodeGenericFuncDesc();
+    andExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    andExprDesc.setGenericUDF(andUdf);
+    List<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>(2);
+    children3.add(greaterExprDesc);
+    children3.add(col2Expr);
+    andExprDesc.setChildren(children3);
 
-    VectorExpression veOr = vc.getVectorExpression(orExprDesc);
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("col1", 0);
+    columnMap.put("col2", 1);
+
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    VectorExpression veAnd = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertEquals(veAnd.getClass(), FilterExprAndExpr.class);
+    assertEquals(veAnd.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
+    assertEquals(veAnd.getChildExpressions()[1].getClass(), SelectColumnIsTrue.class);
+
+    veAnd = vc.getVectorExpression(andExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(veAnd.getClass(), ColAndCol.class);
+    assertEquals(1, veAnd.getChildExpressions().length);
+    assertEquals(veAnd.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+    assertEquals(2, ((ColAndCol) veAnd).getColNum1());
+    assertEquals(1, ((ColAndCol) veAnd).getColNum2());
+    assertEquals(3, ((ColAndCol) veAnd).getOutputColumn());
+
+    //OR
+    GenericUDFOPOr orUdf = new GenericUDFOPOr();
+    ExprNodeGenericFuncDesc orExprDesc = new ExprNodeGenericFuncDesc();
+    orExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    orExprDesc.setGenericUDF(orUdf);
+    List<ExprNodeDesc> children4 = new ArrayList<ExprNodeDesc>(2);
+    children4.add(greaterExprDesc);
+    children4.add(col2Expr);
+    orExprDesc.setChildren(children4);
 
+    //Allocate new Vectorization context to reset the intermediate columns.
+    vc = new VectorizationContext(columnMap, 2);
+    VectorExpression veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(veOr.getClass(), FilterExprOrExpr.class);
     assertEquals(veOr.getChildExpressions()[0].getClass(), FilterLongColGreaterLongScalar.class);
-    assertEquals(veOr.getChildExpressions()[1].getClass(), FilterDoubleColLessDoubleScalar.class);
+    assertEquals(veOr.getChildExpressions()[1].getClass(), SelectColumnIsTrue.class);
+
+    veOr = vc.getVectorExpression(orExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(veOr.getClass(), ColOrCol.class);
+    assertEquals(1, veAnd.getChildExpressions().length);
+    assertEquals(veAnd.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+    assertEquals(2, ((ColOrCol) veOr).getColNum1());
+    assertEquals(1, ((ColOrCol) veOr).getColNum2());
+    assertEquals(3, ((ColOrCol) veOr).getOutputColumn());
+  }
+
+  @Test
+  public void testNotExpression() throws HiveException {
+    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
+    ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
+
+    GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
+    ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
+    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    greaterExprDesc.setGenericUDF(udf);
+    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    greaterExprDesc.setChildren(children1);
+
+    ExprNodeGenericFuncDesc notExpr = new ExprNodeGenericFuncDesc();
+    notExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    GenericUDFOPNot notUdf = new GenericUDFOPNot();
+    notExpr.setGenericUDF(notUdf);
+    List<ExprNodeDesc> childOfNot = new ArrayList<ExprNodeDesc>();
+    childOfNot.add(greaterExprDesc);
+    notExpr.setChildren(childOfNot);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("col1", 0);
+    columnMap.put("col2", 1);
+
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    VectorExpression ve = vc.getVectorExpression(notExpr, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertEquals(ve.getClass(), SelectColumnIsFalse.class);
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+
+    ve = vc.getVectorExpression(notExpr, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(ve.getClass(), NotCol.class);
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+  }
+
+  @Test
+  public void testNullExpressions() throws HiveException {
+    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
+    ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
+
+    GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
+    ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
+    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    greaterExprDesc.setGenericUDF(udf);
+    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    greaterExprDesc.setChildren(children1);
+
+    ExprNodeGenericFuncDesc isNullExpr = new ExprNodeGenericFuncDesc();
+    isNullExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    GenericUDFOPNull isNullUdf = new GenericUDFOPNull();
+    isNullExpr.setGenericUDF(isNullUdf);
+    List<ExprNodeDesc> childOfIsNull = new ArrayList<ExprNodeDesc>();
+    childOfIsNull.add(greaterExprDesc);
+    isNullExpr.setChildren(childOfIsNull);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("col1", 0);
+    columnMap.put("col2", 1);
+
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    VectorExpression ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertEquals(ve.getClass(), SelectColumnIsNull.class);
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+    assertEquals(2, ve.getChildExpressions()[0].getOutputColumn());
+    assertEquals(2, ((SelectColumnIsNull) ve).getColNum());
+
+    ve = vc.getVectorExpression(isNullExpr, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(ve.getClass(), IsNull.class);
+    assertEquals(2, ((IsNull) ve).getColNum());
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+  }
+
+  @Test
+  public void testNotNullExpressions() throws HiveException {
+    ExprNodeColumnDesc col1Expr = new ExprNodeColumnDesc(Integer.class, "col1", "table", false);
+    ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc(new Integer(10));
+
+    GenericUDFOPGreaterThan udf = new GenericUDFOPGreaterThan();
+    ExprNodeGenericFuncDesc greaterExprDesc = new ExprNodeGenericFuncDesc();
+    greaterExprDesc.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    greaterExprDesc.setGenericUDF(udf);
+    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    greaterExprDesc.setChildren(children1);
+
+    ExprNodeGenericFuncDesc isNotNullExpr = new ExprNodeGenericFuncDesc();
+    isNotNullExpr.setTypeInfo(TypeInfoFactory.booleanTypeInfo);
+    GenericUDFOPNotNull notNullUdf = new GenericUDFOPNotNull();
+    isNotNullExpr.setGenericUDF(notNullUdf);
+    List<ExprNodeDesc> childOfNot = new ArrayList<ExprNodeDesc>();
+    childOfNot.add(greaterExprDesc);
+    isNotNullExpr.setChildren(childOfNot);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("col1", 0);
+    columnMap.put("col2", 1);
+
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    VectorExpression ve = vc.getVectorExpression(isNotNullExpr, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertEquals(ve.getClass(), SelectColumnIsNotNull.class);
+    assertEquals(2, ((SelectColumnIsNotNull) ve).getColNum());
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
+
+    ve = vc.getVectorExpression(isNotNullExpr, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(ve.getClass(), IsNotNull.class);
+    assertEquals(2, ((IsNotNull) ve).getColNum());
+    assertEquals(ve.getChildExpressions()[0].getClass(), LongColGreaterLongScalar.class);
   }
 
   @Test
@@ -308,7 +555,7 @@ public class TestVectorizationContext {
     columnMap.put("a", 0);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    VectorExpression ve = vc.getVectorExpression(scalarMinusConstant);
+    VectorExpression ve = vc.getVectorExpression(scalarMinusConstant, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertEquals(ve.getClass(), LongScalarSubtractLongColumn.class);
   }
@@ -331,9 +578,8 @@ public class TestVectorizationContext {
     columnMap.put("col2", 2);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
 
-    VectorExpression ve = vc.getVectorExpression(exprDesc);
+    VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
     assertTrue(ve instanceof FilterLongColGreaterLongScalar);
   }
@@ -350,9 +596,8 @@ public class TestVectorizationContext {
     Map<String, Integer> columnMap = new HashMap<String, Integer>();
     columnMap.put("col1", 1);
     VectorizationContext vc = new VectorizationContext(columnMap, 1);
-    vc.setOperatorType(OperatorType.SELECT);
 
-    VectorExpression ve = vc.getVectorExpression(negExprDesc);
+    VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertTrue( ve instanceof LongColUnaryMinus);
   }
@@ -369,9 +614,8 @@ public class TestVectorizationContext {
     Map<String, Integer> columnMap = new HashMap<String, Integer>();
     columnMap.put("col1", 1);
     VectorizationContext vc = new VectorizationContext(columnMap, 1);
-    vc.setOperatorType(OperatorType.SELECT);
 
-    VectorExpression ve = vc.getVectorExpression(negExprDesc);
+    VectorExpression ve = vc.getVectorExpression(negExprDesc, VectorExpressionDescriptor.Mode.PROJECTION);
 
     assertTrue( ve instanceof DoubleColUnaryMinus);
   }
@@ -396,8 +640,7 @@ public class TestVectorizationContext {
     columnMap.put("a", 0);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
-    VectorExpression ve = vc.getVectorExpression(scalarGreaterColExpr);
+    VectorExpression ve = vc.getVectorExpression(scalarGreaterColExpr, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongScalarGreaterLongColumn.class, ve.getClass());
   }
 
@@ -421,8 +664,199 @@ public class TestVectorizationContext {
     columnMap.put("a", 0);
 
     VectorizationContext vc = new VectorizationContext(columnMap, 2);
-    vc.setOperatorType(OperatorType.FILTER);
-    VectorExpression ve = vc.getVectorExpression(colEqualScalar);
+    VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.FILTER);
     assertEquals(FilterLongColEqualLongScalar.class, ve.getClass());
   }
+
+  @Test
+  public void testBooleanColumnCompareBooleanScalar() throws HiveException {
+    ExprNodeGenericFuncDesc colEqualScalar = new ExprNodeGenericFuncDesc();
+    GenericUDFOPEqual gudf = new GenericUDFOPEqual();
+    colEqualScalar.setGenericUDF(gudf);
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
+    ExprNodeConstantDesc constDesc =
+        new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, 20);
+    ExprNodeColumnDesc colDesc =
+        new ExprNodeColumnDesc(Boolean.class, "a", "table", false);
+
+    children.add(colDesc);
+    children.add(constDesc);
+
+    colEqualScalar.setChildren(children);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("a", 0);
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+    VectorExpression ve = vc.getVectorExpression(colEqualScalar, VectorExpressionDescriptor.Mode.PROJECTION);
+    assertEquals(LongColEqualLongScalar.class, ve.getClass());
+  }
+
+  @Test
+  public void testUnaryStringExpressions() throws HiveException {
+    ExprNodeGenericFuncDesc stringUnary = new ExprNodeGenericFuncDesc();
+    stringUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
+    ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(String.class, "a", "table", false);
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+    children.add(colDesc);
+    stringUnary.setChildren(children);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("b", 0);
+    columnMap.put("a", 1);
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    GenericUDF stringLower = new GenericUDFLower();
+    stringUnary.setGenericUDF(stringLower);
+
+    VectorExpression ve = vc.getVectorExpression(stringUnary);
+
+    assertEquals(StringLower.class, ve.getClass());
+    assertEquals(1, ((StringLower) ve).getColNum());
+    assertEquals(2, ((StringLower) ve).getOutputColumn());
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ExprNodeGenericFuncDesc anotherUnary = new ExprNodeGenericFuncDesc();
+    anotherUnary.setTypeInfo(TypeInfoFactory.stringTypeInfo);
+    List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
+    children2.add(stringUnary);
+    anotherUnary.setChildren(children2);
+    GenericUDFBridge udfbridge = new GenericUDFBridge("ltrim", false, UDFLTrim.class.getName());
+    anotherUnary.setGenericUDF(udfbridge);
+
+    ve = vc.getVectorExpression(anotherUnary);
+    VectorExpression childVe = ve.getChildExpressions()[0];
+    assertEquals(StringLower.class, childVe.getClass());
+    assertEquals(1, ((StringLower) childVe).getColNum());
+    assertEquals(2, ((StringLower) childVe).getOutputColumn());
+
+    assertEquals(StringLTrim.class, ve.getClass());
+    assertEquals(2, ((StringLTrim) ve).getInputColumn());
+    assertEquals(3, ((StringLTrim) ve).getOutputColumn());
+  }
+
+  @Test
+  public void testMathFunctions() throws HiveException {
+    ExprNodeGenericFuncDesc mathFuncExpr = new ExprNodeGenericFuncDesc();
+    mathFuncExpr.setTypeInfo(TypeInfoFactory.doubleTypeInfo);
+    ExprNodeColumnDesc colDesc1 = new ExprNodeColumnDesc(Integer.class, "a", "table", false);
+    ExprNodeColumnDesc colDesc2 = new ExprNodeColumnDesc(Double.class, "b", "table", false);
+    List<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
+    List<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
+    children1.add(colDesc1);
+    children2.add(colDesc2);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("b", 0);
+    columnMap.put("a", 1);
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    // Sin(double)
+    GenericUDFBridge gudfBridge = new GenericUDFBridge("sin", false, UDFSin.class.getName());
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setChildren(children2);
+    VectorExpression ve = vc.getVectorExpression(mathFuncExpr, VectorExpressionDescriptor.Mode.PROJECTION);
+    Assert.assertEquals(FuncSinDoubleToDouble.class, ve.getClass());
+
+    // Round without digits
+    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncRoundDoubleToDouble.class, ve.getClass());
+
+    // Round with digits
+    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    children2.add(new ExprNodeConstantDesc(4));
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(RoundWithNumDigitsDoubleToDouble.class, ve.getClass());
+    Assert.assertEquals(4, ((RoundWithNumDigitsDoubleToDouble) ve).getDecimalPlaces().get());
+
+    // Log with int base
+    gudfBridge = new GenericUDFBridge("log", false, UDFLog.class.getName());
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    children2.clear();
+    children2.add(new ExprNodeConstantDesc(4.0));
+    children2.add(colDesc2);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass());
+    Assert.assertTrue(4 == ((FuncLogWithBaseDoubleToDouble) ve).getBase());
+
+    // Log with default base
+    children2.clear();
+    children2.add(colDesc2);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncLnDoubleToDouble.class, ve.getClass());
+
+    //Log with double base
+    children2.clear();
+    children2.add(new ExprNodeConstantDesc(4.5));
+    children2.add(colDesc2);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncLogWithBaseDoubleToDouble.class, ve.getClass());
+    Assert.assertTrue(4.5 == ((FuncLogWithBaseDoubleToDouble) ve).getBase());
+
+    //Log with int input and double base
+    children2.clear();
+    children2.add(new ExprNodeConstantDesc(4.5));
+    children2.add(colDesc1);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncLogWithBaseLongToDouble.class, ve.getClass());
+    Assert.assertTrue(4.5 == ((FuncLogWithBaseLongToDouble) ve).getBase());
+
+    //Power with double power
+    gudfBridge = new GenericUDFBridge("power", false, UDFPower.class.getName());
+    children2.clear();
+    children2.add(colDesc2);
+    children2.add(new ExprNodeConstantDesc(4.5));
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncPowerDoubleToDouble.class, ve.getClass());
+    Assert.assertTrue(4.5 == ((FuncPowerDoubleToDouble) ve).getPower());
+
+    //Round with default decimal places
+    gudfBridge = new GenericUDFBridge("round", false, UDFRound.class.getName());
+    mathFuncExpr.setGenericUDF(gudfBridge);
+    children2.clear();
+    children2.add(colDesc2);
+    mathFuncExpr.setChildren(children2);
+    ve = vc.getVectorExpression(mathFuncExpr);
+    Assert.assertEquals(FuncRoundDoubleToDouble.class, ve.getClass());
+  }
+
+  @Test
+  public void testTimeStampUdfs() throws HiveException {
+    ExprNodeGenericFuncDesc tsFuncExpr = new ExprNodeGenericFuncDesc();
+    tsFuncExpr.setTypeInfo(TypeInfoFactory.intTypeInfo);
+    ExprNodeColumnDesc colDesc1 = new ExprNodeColumnDesc(
+        TypeInfoFactory.timestampTypeInfo, "a", "table", false);
+    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
+    children.add(colDesc1);
+
+    Map<String, Integer> columnMap = new HashMap<String, Integer>();
+    columnMap.put("b", 0);
+    columnMap.put("a", 1);
+    VectorizationContext vc = new VectorizationContext(columnMap, 2);
+
+    //UDFYear
+    GenericUDFBridge gudfBridge = new GenericUDFBridge("year", false, UDFYear.class.getName());
+    tsFuncExpr.setGenericUDF(gudfBridge);
+    tsFuncExpr.setChildren(children);
+    VectorExpression ve = vc.getVectorExpression(tsFuncExpr);
+    Assert.assertEquals(VectorUDFYearLong.class, ve.getClass());
+
+    //GenericUDFToUnixTimeStamp
+    GenericUDFToUnixTimeStamp gudf = new GenericUDFToUnixTimeStamp();
+    tsFuncExpr.setGenericUDF(gudf);
+    tsFuncExpr.setTypeInfo(TypeInfoFactory.longTypeInfo);
+    ve = vc.getVectorExpression(tsFuncExpr);
+    Assert.assertEquals(VectorUDFUnixTimeStampLong.class, ve.getClass());
+  }
 }

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java Mon Oct 28 18:44:01 2013
@@ -181,7 +181,7 @@ public class TestVectorFilterExpressions
     VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
         5, 2, seed);
     LongColumnVector lcv0 = (LongColumnVector) vrb.cols[0];
-    FilterLongScalarLessLongColumn expr1 = new FilterLongScalarLessLongColumn(0, 15);
+    FilterLongScalarLessLongColumn expr1 = new FilterLongScalarLessLongColumn(15, 0);
 
     //Basic case
     lcv0.vector[0] = 5;
@@ -197,7 +197,7 @@ public class TestVectorFilterExpressions
     assertEquals(1, vrb.selected[0]);
     assertEquals(2, vrb.selected[1]);
 
-    FilterLongScalarGreaterLongColumn expr2 = new FilterLongScalarGreaterLongColumn(0, 18);
+    FilterLongScalarGreaterLongColumn expr2 = new FilterLongScalarGreaterLongColumn(18, 0);
     expr2.evaluate(vrb);
     assertEquals(1, vrb.size);
     assertTrue(vrb.selectedInUse);

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorLogicalExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorLogicalExpressions.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorLogicalExpressions.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorLogicalExpressions.java Mon Oct 28 18:44:01 2013
@@ -347,7 +347,8 @@ public class TestVectorLogicalExpression
     SelectColumnIsTrue expr1 = new SelectColumnIsTrue(0);
     SelectColumnIsFalse expr2 = new SelectColumnIsFalse(1);
 
-    FilterExprOrExpr orExpr = new FilterExprOrExpr(expr1, expr2);
+    FilterExprOrExpr orExpr = new FilterExprOrExpr();
+    orExpr.setChildExpressions(new VectorExpression[] {expr1, expr2});
 
     orExpr.evaluate(batch1);
     orExpr.evaluate(batch2);
@@ -386,7 +387,8 @@ public class TestVectorLogicalExpression
     SelectColumnIsTrue expr1 = new SelectColumnIsTrue(0);
     SelectColumnIsFalse expr2 = new SelectColumnIsFalse(1);
 
-    FilterExprOrExpr orExpr = new FilterExprOrExpr(expr1, expr2);
+    FilterExprOrExpr orExpr = new FilterExprOrExpr();
+    orExpr.setChildExpressions(new VectorExpression[] {expr1, expr2});
 
     orExpr.evaluate(batch1);
 
@@ -419,7 +421,8 @@ public class TestVectorLogicalExpression
     SelectColumnIsTrue expr1 = new SelectColumnIsTrue(0);
     SelectColumnIsFalse expr2 = new SelectColumnIsFalse(1);
 
-    FilterExprOrExpr orExpr = new FilterExprOrExpr(expr1, expr2);
+    FilterExprOrExpr orExpr = new FilterExprOrExpr();
+    orExpr.setChildExpressions(new VectorExpression[] {expr1, expr2});
 
     // Evaluate batch1 so that temporary arrays in the expression
     // have residual values to interfere in later computation
@@ -448,9 +451,10 @@ public class TestVectorLogicalExpression
     SelectColumnIsTrue expr1 = new SelectColumnIsTrue(0);
     SelectColumnIsFalse expr2 = new SelectColumnIsFalse(1);
 
-    FilterExprAndExpr orExpr = new FilterExprAndExpr(expr1, expr2);
+    FilterExprAndExpr andExpr = new FilterExprAndExpr();
+    andExpr.setChildExpressions(new VectorExpression[] {expr1, expr2});
 
-    orExpr.evaluate(batch1);
+    andExpr.evaluate(batch1);
 
     assertEquals(1, batch1.size);
 

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorMathFunctions.java Mon Oct 28 18:44:01 2013
@@ -18,22 +18,38 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
 import java.io.UnsupportedEncodingException;
 import java.util.Arrays;
 
 import junit.framework.Assert;
 
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.*;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncACosDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncASinDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncATanDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncAbsLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCeilDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncCosDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncDegreesDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncExpDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncFloorDoubleToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10DoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog10LongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2DoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLog2LongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRadiansDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSignDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSignLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSqrtDoubleToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncTanDoubleToDouble;
 import org.junit.Test;
 
 
@@ -84,7 +100,7 @@ public class TestVectorMathFunctions {
   @Test
   public void testRoundToDecimalPlaces() {
     VectorizedRowBatch b = getVectorizedRowBatchDoubleInDoubleOut();
-    VectorExpression expr = new RoundWithNumDigitsDoubleToDouble(0, 1);
+    VectorExpression expr = new RoundWithNumDigitsDoubleToDouble(0, 4, 1);
     ((ISetLongArg) expr).setArg(4);  // set number of digits
     expr.evaluate(b);
     DoubleColumnVector resultV = (DoubleColumnVector) b.cols[1];
@@ -386,7 +402,7 @@ public class TestVectorMathFunctions {
     b.size = VectorizedRowBatch.DEFAULT_SIZE;
     int n = b.size;
     v.noNulls = true;
-    VectorExpression expr = new FuncRand(0);
+    VectorExpression expr = new FuncRandNoSeed(0);
     expr.evaluate(b);
     double sum = 0;
     for(int i = 0; i != n; i++) {
@@ -421,7 +437,7 @@ public class TestVectorMathFunctions {
     VectorizedRowBatch b = getVectorizedRowBatchDoubleInDoubleOut();
     DoubleColumnVector resultV = (DoubleColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new FuncLogWithBaseDoubleToDouble(0, 1);
+    VectorExpression expr = new FuncLogWithBaseDoubleToDouble(10.0, 0, 1);
     ((ISetDoubleArg) expr).setArg(10.0d);  // set base
     expr.evaluate(b);
     Assert.assertTrue(equalsWithinTolerance(Math.log(0.5d) / Math.log(10), resultV.vector[4]));
@@ -436,8 +452,8 @@ public class TestVectorMathFunctions {
     DoubleColumnVector resultV = (DoubleColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
     inV.vector[4] = -4.0;
-    VectorExpression expr = new PosModDoubleToDouble(0, 1);
-    ((ISetDoubleArg) expr).setArg(0.3d);  // set base
+    VectorExpression expr = new PosModDoubleToDouble(0, 0.3d, 1);
+    //((ISetDoubleArg) expr).setArg(0.3d);  // set base
     expr.evaluate(b);
     Assert.assertTrue(equalsWithinTolerance(((-4.0d % 0.3d) + 0.3d) % 0.3d, resultV.vector[4]));
 
@@ -445,8 +461,8 @@ public class TestVectorMathFunctions {
     b = getVectorizedRowBatchLongInLongOut();
     LongColumnVector resV2 = (LongColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    expr = new PosModLongToLong(0, 1);
-    ((ISetLongArg) expr).setArg(3);
+    expr = new PosModLongToLong(0, 3, 1);
+    //((ISetLongArg) expr).setArg(3);
     expr.evaluate(b);
     Assert.assertEquals(((-2 % 3) + 3) % 3, resV2.vector[0]);
   }
@@ -456,7 +472,7 @@ public class TestVectorMathFunctions {
     VectorizedRowBatch b = getVectorizedRowBatchDoubleInDoubleOut();
     DoubleColumnVector resultV = (DoubleColumnVector) b.cols[1];
     b.cols[0].noNulls = true;
-    VectorExpression expr = new FuncPowerDoubleToDouble(0, 1);
+    VectorExpression expr = new FuncPowerDoubleToDouble(0, 2.0, 1);
     ((ISetDoubleArg) expr).setArg(2.0d);  // set power
     expr.evaluate(b);
     Assert.assertTrue(equalsWithinTolerance(0.5d * 0.5d, resultV.vector[4]));

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorStringExpressions.java Mon Oct 28 18:44:01 2013
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColEqualStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringColLessStringColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.StringScalarEqualStringColumn;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.io.Text;
 import org.junit.Test;
 
@@ -209,7 +210,7 @@ public class TestVectorStringExpressions
   public void testStringScalarCompareStringCol() {
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
-    expr = new FilterStringScalarEqualStringColumn(0, red2);
+    expr = new FilterStringScalarEqualStringColumn(red2, 0);
     expr.evaluate(batch);
 
     // only red qualifies, and it's in entry 0
@@ -217,7 +218,7 @@ public class TestVectorStringExpressions
     Assert.assertTrue(batch.selected[0] == 0);
 
     batch = makeStringBatch();
-    expr = new FilterStringScalarGreaterStringColumn(0, red2);
+    expr = new FilterStringScalarGreaterStringColumn(red2, 0);
     expr.evaluate(batch);
 
     // only green qualifies, and it's in entry 1
@@ -225,7 +226,7 @@ public class TestVectorStringExpressions
     Assert.assertTrue(batch.selected[0] == 1);
 
     batch = makeStringBatch();
-    expr = new FilterStringScalarLessEqualStringColumn(0, green);
+    expr = new FilterStringScalarLessEqualStringColumn(green, 0);
     expr.evaluate(batch);
 
     // green and red qualify
@@ -239,7 +240,7 @@ public class TestVectorStringExpressions
     VectorizedRowBatch batch = makeStringBatch();
     VectorExpression expr;
 
-    expr = new StringScalarEqualStringColumn(0, red2, 2);
+    expr = new StringScalarEqualStringColumn(red2, 0, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     LongColumnVector outVector = (LongColumnVector) batch.cols[2];
@@ -248,7 +249,7 @@ public class TestVectorStringExpressions
     Assert.assertEquals(0, outVector.vector[2]);
 
     batch = makeStringBatch();
-    expr = new StringScalarEqualStringColumn(0, green, 2);
+    expr = new StringScalarEqualStringColumn(green, 0, 2);
     expr.evaluate(batch);
     Assert.assertEquals(3, batch.size);
     outVector = (LongColumnVector) batch.cols[2];
@@ -920,7 +921,7 @@ public class TestVectorStringExpressions
   }
 
   @Test
-  public void testStringLike() {
+  public void testStringLike() throws HiveException {
 
     // has nulls, not repeating
     VectorizedRowBatch batch;
@@ -928,7 +929,7 @@ public class TestVectorStringExpressions
     int initialBatchSize;
     batch = makeStringBatchMixedCharSize();
     pattern = new Text(mixPercentPattern);
-    FilterStringColLikeStringScalar expr = new FilterStringColLikeStringScalar(0, pattern);
+    FilterStringColLikeStringScalar expr = new FilterStringColLikeStringScalar(0, mixPercentPattern);
     expr.evaluate(batch);
 
     // verify that the beginning entry is the only one that matches
@@ -973,48 +974,48 @@ public class TestVectorStringExpressions
     Assert.assertEquals(initialBatchSize, batch.size);
   }
 
-  public void testStringLikePatternType() {
+  public void testStringLikePatternType() throws UnsupportedEncodingException, HiveException {
     FilterStringColLikeStringScalar expr;
 
     // BEGIN pattern
-    expr = new FilterStringColLikeStringScalar(0, new Text("abc%"));
+    expr = new FilterStringColLikeStringScalar(0, "abc%".getBytes());
     Assert.assertEquals(FilterStringColLikeStringScalar.BeginChecker.class,
         expr.checker.getClass());
 
     // END pattern
-    expr = new FilterStringColLikeStringScalar(0, new Text("%abc"));
+    expr = new FilterStringColLikeStringScalar(0, "%abc".getBytes("UTF-8"));
     Assert.assertEquals(FilterStringColLikeStringScalar.EndChecker.class,
         expr.checker.getClass());
 
     // MIDDLE pattern
-    expr = new FilterStringColLikeStringScalar(0, new Text("%abc%"));
+    expr = new FilterStringColLikeStringScalar(0, "%abc%".getBytes());
     Assert.assertEquals(FilterStringColLikeStringScalar.MiddleChecker.class,
         expr.checker.getClass());
 
     // COMPLEX pattern
-    expr = new FilterStringColLikeStringScalar(0, new Text("%abc%de"));
+    expr = new FilterStringColLikeStringScalar(0, "%abc%de".getBytes());
     Assert.assertEquals(FilterStringColLikeStringScalar.ComplexChecker.class,
         expr.checker.getClass());
 
     // NONE pattern
-    expr = new FilterStringColLikeStringScalar(0, new Text("abc"));
+    expr = new FilterStringColLikeStringScalar(0, "abc".getBytes());
     Assert.assertEquals(FilterStringColLikeStringScalar.NoneChecker.class,
         expr.checker.getClass());
   }
 
-  public void testStringLikeMultiByte() {
+  public void testStringLikeMultiByte() throws HiveException {
     FilterStringColLikeStringScalar expr;
     VectorizedRowBatch batch;
 
     // verify that a multi byte LIKE expression matches a matching string
     batch = makeStringBatchMixedCharSize();
-    expr = new FilterStringColLikeStringScalar(0, new Text("%" + multiByte + "%"));
+    expr = new FilterStringColLikeStringScalar(0, ("%" + multiByte + "%").getBytes());
     expr.evaluate(batch);
     Assert.assertEquals(batch.size, 1);
 
     // verify that a multi byte LIKE expression doesn't match a non-matching string
     batch = makeStringBatchMixedCharSize();
-    expr = new FilterStringColLikeStringScalar(0, new Text("%" + multiByte + "x"));
+    expr = new FilterStringColLikeStringScalar(0, ("%" + multiByte + "x").getBytes());
     expr.evaluate(batch);
     Assert.assertEquals(batch.size, 0);
   }
@@ -1024,7 +1025,7 @@ public class TestVectorStringExpressions
 
     // has nulls, not repeating
     VectorizedRowBatch batch = makeStringBatch();
-    StringConcatColScalar expr = new StringConcatColScalar(0, 1, red);
+    StringConcatColScalar expr = new StringConcatColScalar(0, red, 1);
     expr.evaluate(batch);
     BytesColumnVector outCol = (BytesColumnVector) batch.cols[1];
 

Modified: hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java (original)
+++ hive/branches/maven/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java Mon Oct 28 18:44:01 2013
@@ -268,7 +268,7 @@ public class TestOrcFile {
         + "binary,string1:string,middle:struct<list:array<struct<int1:int,"
         + "string1:string>>>,list:array<struct<int1:int,string1:string>>,"
         + "map:map<string,struct<int1:int,string1:string>>,ts:timestamp,"
-        + "decimal1:decimal>", readerInspector.getTypeName());
+        + "decimal1:decimal(65,30)>", readerInspector.getTypeName());
     List<? extends StructField> fields = readerInspector
         .getAllStructFieldRefs();
     BooleanObjectInspector bo = (BooleanObjectInspector) readerInspector
@@ -1006,8 +1006,8 @@ public class TestOrcFile {
       } else {
         union.set((byte) 1, new Text(new Integer(i*i).toString()));
       }
-      value = HiveDecimal.create(new BigInteger(118, rand),
-          rand.nextInt(36));
+      value = HiveDecimal.create(new BigInteger(104, rand),
+          rand.nextInt(28));
       row.setFieldValue(2, value);
       if (maxValue.compareTo(value) < 0) {
         maxValue = value;
@@ -1036,7 +1036,8 @@ public class TestOrcFile {
     assertEquals(303, stats.getNumberOfValues());
     assertEquals(HiveDecimal.create("-5643.234"), stats.getMinimum());
     assertEquals(maxValue, stats.getMaximum());
-    assertEquals(null, stats.getSum());
+    // TODO: fix this
+//    assertEquals(null,stats.getSum());
     int stripeCount = 0;
     int rowCount = 0;
     long currentOffset = -1;
@@ -1060,7 +1061,7 @@ public class TestOrcFile {
     row = (OrcStruct) rows.next(null);
     assertEquals(1, rows.getRowNumber());
     inspector = reader.getObjectInspector();
-    assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal>",
+    assertEquals("struct<time:timestamp,union:uniontype<int,string>,decimal:decimal(65,30)>",
         inspector.getTypeName());
     assertEquals(Timestamp.valueOf("2000-03-12 15:00:00"),
         row.getFieldValue(0));
@@ -1108,8 +1109,8 @@ public class TestOrcFile {
         assertEquals(1, union.getTag());
         assertEquals(new Text(new Integer(i*i).toString()), union.getObject());
       }
-      assertEquals(HiveDecimal.create(new BigInteger(118, rand),
-                                   rand.nextInt(36)), row.getFieldValue(2));
+      assertEquals(HiveDecimal.create(new BigInteger(104, rand),
+                                   rand.nextInt(28)), row.getFieldValue(2));
     }
     for(int i=0; i < 5000; ++i) {
       row = (OrcStruct) rows.next(row);

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_1.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_1.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_1.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_1.q Mon Oct 28 18:44:01 2013
@@ -1,10 +1,10 @@
 drop table decimal_1;
 
-create table decimal_1 (t decimal);
+create table decimal_1 (t decimal(4,2));
 alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe';
 
 insert overwrite table decimal_1
-  select cast('17.29' as decimal) from src limit 1;
+  select cast('17.29' as decimal(4,2)) from src limit 1;
 select cast(t as boolean) from decimal_1 limit 1;
 select cast(t as tinyint) from decimal_1 limit 1;
 select cast(t as smallint) from decimal_1 limit 1;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_2.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_2.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_2.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_2.q Mon Oct 28 18:44:01 2013
@@ -1,10 +1,10 @@
 drop table decimal_2;
 
-create table decimal_2 (t decimal);
+create table decimal_2 (t decimal(18,9));
 alter table decimal_2 set serde 'org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe';
 
 insert overwrite table decimal_2
-  select cast('17.29' as decimal) from src limit 1;
+  select cast('17.29' as decimal(4,2)) from src limit 1;
 
 select cast(t as boolean) from decimal_2 limit 1;
 select cast(t as tinyint) from decimal_2 limit 1;
@@ -16,7 +16,7 @@ select cast(t as double) from decimal_2 
 select cast(t as string) from decimal_2 limit 1;
 
 insert overwrite table decimal_2
-  select cast('3404045.5044003' as decimal) from src limit 1;
+  select cast('3404045.5044003' as decimal(18,9)) from src limit 1;
 
 select cast(t as boolean) from decimal_2 limit 1;
 select cast(t as tinyint) from decimal_2 limit 1;
@@ -27,14 +27,14 @@ select cast(t as float) from decimal_2 l
 select cast(t as double) from decimal_2 limit 1;
 select cast(t as string) from decimal_2 limit 1;
 
-select cast(3.14 as decimal) from decimal_2 limit 1;
-select cast(cast(3.14 as float) as decimal) from decimal_2 limit 1;
-select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal) from decimal_2 limit 1;
+select cast(3.14 as decimal(4,2)) from decimal_2 limit 1;
+select cast(cast(3.14 as float) as decimal(4,2)) from decimal_2 limit 1;
+select cast(cast('2012-12-19 11:12:19.1234567' as timestamp) as decimal(30,8)) from decimal_2 limit 1;
 select cast(true as decimal) from decimal_2 limit 1;
 select cast(3Y as decimal) from decimal_2 limit 1;
 select cast(3S as decimal) from decimal_2 limit 1;
 select cast(cast(3 as int) as decimal) from decimal_2 limit 1;
 select cast(3L as decimal) from decimal_2 limit 1;
-select cast(0.99999999999999999999 as decimal) from decimal_2 limit 1;
-select cast('0.99999999999999999999' as decimal) from decimal_2 limit 1;
+select cast(0.99999999999999999999 as decimal(20,19)) from decimal_2 limit 1;
+select cast('0.99999999999999999999' as decimal(20,20)) from decimal_2 limit 1;
 drop table decimal_2;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_3.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_3.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_3.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_3.q Mon Oct 28 18:44:01 2013
@@ -1,6 +1,6 @@
 DROP TABLE IF EXISTS DECIMAL_3;
 
-CREATE TABLE DECIMAL_3(key decimal, value int) 
+CREATE TABLE DECIMAL_3(key decimal(65,30), value int) 
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_4.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_4.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_4.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_4.q Mon Oct 28 18:44:01 2013
@@ -1,12 +1,12 @@
 DROP TABLE IF EXISTS DECIMAL_4_1;
 DROP TABLE IF EXISTS DECIMAL_4_2;
 
-CREATE TABLE DECIMAL_4_1(key decimal, value int) 
+CREATE TABLE DECIMAL_4_1(key decimal(35,25), value int) 
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;
 
-CREATE TABLE DECIMAL_4_2(key decimal, value decimal) 
+CREATE TABLE DECIMAL_4_2(key decimal(35,25), value decimal(35,25)) 
 STORED AS ORC;
 
 LOAD DATA LOCAL INPATH '../../data/files/kv7.txt' INTO TABLE DECIMAL_4_1;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_join.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_join.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_join.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_join.q Mon Oct 28 18:44:01 2013
@@ -1,6 +1,6 @@
 -- HIVE-5292 Join on decimal columns fails
 
-create table src_dec (key decimal, value string);
+create table src_dec (key decimal(3,0), value string);
 load data local inpath '../../data/files/kv1.txt' into table src_dec;
 
 select * from src_dec a join src_dec b on a.key=b.key+450;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_precision.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_precision.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_precision.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_precision.q Mon Oct 28 18:44:01 2013
@@ -1,6 +1,6 @@
 DROP TABLE IF EXISTS DECIMAL_PRECISION;
 
-CREATE TABLE DECIMAL_PRECISION(dec decimal) 
+CREATE TABLE DECIMAL_PRECISION(dec decimal(60,30)) 
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;
@@ -17,11 +17,11 @@ SELECT dec, dec * dec FROM DECIMAL_PRECI
 
 SELECT avg(dec), sum(dec) FROM DECIMAL_PRECISION;
 
-SELECT dec * cast('123456789012345678901234567890.123456789' as decimal) FROM DECIMAL_PRECISION LIMIT 1;
-SELECT * from DECIMAL_PRECISION WHERE dec > cast('123456789012345678901234567890.123456789' as decimal) LIMIT 1;
+SELECT dec * cast('123456789012345678901234567890.123456789' as decimal(39,9)) FROM DECIMAL_PRECISION LIMIT 1;
+SELECT * from DECIMAL_PRECISION WHERE dec > cast('123456789012345678901234567890.123456789' as decimal(39,9)) LIMIT 1;
 SELECT dec * 123456789012345678901234567890.123456789 FROM DECIMAL_PRECISION LIMIT 1;
 
-SELECT MIN(cast('123456789012345678901234567890.123456789' as decimal)) FROM DECIMAL_PRECISION;
-SELECT COUNT(cast('123456789012345678901234567890.123456789' as decimal)) FROM DECIMAL_PRECISION;
+SELECT MIN(cast('123456789012345678901234567890.123456789' as decimal(39,9))) FROM DECIMAL_PRECISION;
+SELECT COUNT(cast('123456789012345678901234567890.123456789' as decimal(39,9))) FROM DECIMAL_PRECISION;
 
 DROP TABLE DECIMAL_PRECISION;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/decimal_udf.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/decimal_udf.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/decimal_udf.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/decimal_udf.q Mon Oct 28 18:44:01 2013
@@ -1,6 +1,6 @@
 DROP TABLE IF EXISTS DECIMAL_UDF;
 
-CREATE TABLE DECIMAL_UDF (key decimal, value int) 
+CREATE TABLE DECIMAL_UDF (key decimal(65,30), value int) 
 ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ' '
 STORED AS TEXTFILE;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/groupby2_map_multi_distinct.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/groupby2_map_multi_distinct.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/groupby2_map_multi_distinct.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/groupby2_map_multi_distinct.q Mon Oct 28 18:44:01 2013
@@ -12,3 +12,14 @@ FROM src
 INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.value,5)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
 
 SELECT dest1.* FROM dest1;
+
+-- HIVE-5560 when group by key is used in distinct funtion, invalid result are returned
+
+EXPLAIN
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.key,1,1)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
+
+FROM src
+INSERT OVERWRITE TABLE dest1 SELECT substr(src.key,1,1), count(DISTINCT substr(src.key,1,1)), concat(substr(src.key,1,1),sum(substr(src.value,5))), sum(DISTINCT substr(src.value, 5)), count(src.value) GROUP BY substr(src.key,1,1);
+
+SELECT dest1.* FROM dest1;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/import_exported_table.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/import_exported_table.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/import_exported_table.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/import_exported_table.q Mon Oct 28 18:44:01 2013
@@ -6,5 +6,5 @@ IMPORT FROM '/tmp/test/exported_table';
 DESCRIBE j1_41;
 SELECT * from j1_41;
 
-dfs -rmr hdfs:///tmp/test/exported_table;
+dfs -rmr hdfs:///tmp/test;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/orc_predicate_pushdown.q Mon Oct 28 18:44:01 2013
@@ -7,7 +7,7 @@ CREATE TABLE orc_pred(t tinyint,
            bo boolean,
            s string,
            ts timestamp,
-           dec decimal,
+           dec decimal(4,2),
            bin binary)
 STORED AS ORC;
 
@@ -22,7 +22,7 @@ CREATE TABLE staging(t tinyint,
            bo boolean,
            s string,
            ts timestamp,
-           dec decimal,
+           dec decimal(4,2),
            bin binary)
 ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
 STORED AS TEXTFILE;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/ptf_decimal.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/ptf_decimal.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/ptf_decimal.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/ptf_decimal.q Mon Oct 28 18:44:01 2013
@@ -9,7 +9,7 @@ CREATE TABLE part( 
     p_type STRING,
     p_size INT,
     p_container STRING,
-    p_retailprice DECIMAL,
+    p_retailprice DECIMAL(6,2),
     p_comment STRING
 );
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/serde_regex.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/serde_regex.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/serde_regex.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/serde_regex.q Mon Oct 28 18:44:01 2013
@@ -42,7 +42,7 @@ DROP TABLE serde_regex;
 
 EXPLAIN
 CREATE TABLE serde_regex1(
-  key decimal,
+  key decimal(65,30),
   value int)
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.RegexSerDe'
 WITH SERDEPROPERTIES (
@@ -51,7 +51,7 @@ WITH SERDEPROPERTIES (
 STORED AS TEXTFILE;
 
 CREATE TABLE serde_regex1(
-  key decimal,
+  key decimal(65,30),
   value int)
 ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.RegexSerDe'
 WITH SERDEPROPERTIES (

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/udf_concat.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/udf_concat.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/udf_concat.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/udf_concat.q Mon Oct 28 18:44:01 2013
@@ -13,3 +13,9 @@ SELECT
   concat(1),
   concat('1234', 'abc', 'extra argument')
 FROM src LIMIT 1;
+
+-- binary/mixed
+SELECT
+  concat(cast('ab' as binary), cast('cd' as binary)),
+  concat('ab', cast('cd' as binary))
+FROM src LIMIT 1;

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/udf_pmod.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/udf_pmod.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/udf_pmod.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/udf_pmod.q Mon Oct 28 18:44:01 2013
@@ -16,5 +16,5 @@ SELECT pmod(CAST(-100 AS BIGINT),CAST(9 
 
 SELECT pmod(CAST(-100.91 AS FLOAT),CAST(9.8 AS FLOAT)), pmod(CAST(-50.1 AS FLOAT),CAST(101.8 AS FLOAT)), pmod(CAST(-100.91 AS FLOAT),CAST(29.75 AS FLOAT)) FROM src LIMIT 1;
 SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src LIMIT 1;
-SELECT pmod(CAST(-100.91 AS DECIMAL),CAST(9.8 AS DECIMAL)), pmod(CAST(-50.1 AS DECIMAL),CAST(101.8 AS DECIMAL)), pmod(CAST(-100.91 AS DECIMAL),CAST(29.75 AS DECIMAL)) FROM src LIMIT 1;
+SELECT pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(9.8 AS DECIMAL(2,1))), pmod(CAST(-50.1 AS DECIMAL(3,1)),CAST(101.8 AS DECIMAL(4,1))), pmod(CAST(-100.91 AS DECIMAL(5,2)),CAST(29.75 AS DECIMAL(4,2))) FROM src LIMIT 1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_double.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_double.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_double.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_double.q Mon Oct 28 18:44:01 2013
@@ -9,7 +9,7 @@ SELECT CAST(-129 AS DOUBLE) FROM src LIM
 SELECT CAST(CAST(-1025 AS BIGINT) AS DOUBLE) FROM src LIMIT 1;
 
 SELECT CAST(CAST(-3.14 AS FLOAT) AS DOUBLE) FROM src LIMIT 1;
-SELECT CAST(CAST(-3.14 AS DECIMAL) AS DOUBLE) FROM src LIMIT 1;
+SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS DOUBLE) FROM src LIMIT 1;
 
 SELECT CAST('-38.14' AS DOUBLE) FROM src LIMIT 1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_float.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_float.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_float.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_float.q Mon Oct 28 18:44:01 2013
@@ -9,7 +9,7 @@ SELECT CAST(-129 AS FLOAT) FROM src LIMI
 SELECT CAST(CAST(-1025 AS BIGINT) AS FLOAT) FROM src LIMIT 1;
 
 SELECT CAST(CAST(-3.14 AS DOUBLE) AS FLOAT) FROM src LIMIT 1;
-SELECT CAST(CAST(-3.14 AS DECIMAL) AS FLOAT) FROM src LIMIT 1;
+SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS FLOAT) FROM src LIMIT 1;
 
 SELECT CAST('-38.14' AS FLOAT) FROM src LIMIT 1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_string.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_string.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_string.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/udf_to_string.q Mon Oct 28 18:44:01 2013
@@ -10,7 +10,7 @@ SELECT CAST(CAST(-1025 AS BIGINT) AS STR
 
 SELECT CAST(CAST(-3.14 AS DOUBLE) AS STRING) FROM src LIMIT 1;
 SELECT CAST(CAST(-3.14 AS FLOAT) AS STRING) FROM src LIMIT 1;
-SELECT CAST(CAST(-3.14 AS DECIMAL) AS STRING) FROM src LIMIT 1;
+SELECT CAST(CAST(-3.14 AS DECIMAL(3,2)) AS STRING) FROM src LIMIT 1;
 
 SELECT CAST('Foo' AS STRING) FROM src LIMIT 1;
 

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/windowing_expressions.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/windowing_expressions.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/windowing_expressions.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/windowing_expressions.q Mon Oct 28 18:44:01 2013
@@ -27,7 +27,7 @@ create table over10k(
            bo boolean,
            s string,
 	   ts timestamp, 
-           dec decimal,  
+           dec decimal(4,2),  
            bin binary)
        row format delimited
        fields terminated by '|';

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/windowing_multipartitioning.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/windowing_multipartitioning.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/windowing_multipartitioning.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/windowing_multipartitioning.q Mon Oct 28 18:44:01 2013
@@ -10,7 +10,7 @@ create table over10k(
            bo boolean,
            s string,
 	   ts timestamp, 
-           dec decimal,  
+           dec decimal(4,2),  
            bin binary)
        row format delimited
        fields terminated by '|';

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/windowing_navfn.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/windowing_navfn.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/windowing_navfn.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/windowing_navfn.q Mon Oct 28 18:44:01 2013
@@ -9,8 +9,8 @@ create table over10k(
            d double,
            bo boolean,
            s string,
-	   ts timestamp, 
-           dec decimal,  
+           ts timestamp, 
+           dec decimal(4,2),  
            bin binary)
        row format delimited
        fields terminated by '|';

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/windowing_ntile.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/windowing_ntile.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/windowing_ntile.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/windowing_ntile.q Mon Oct 28 18:44:01 2013
@@ -10,7 +10,7 @@ create table over10k(
            bo boolean,
            s string,
 	   ts timestamp, 
-           dec decimal,  
+           dec decimal(4,2),  
            bin binary)
        row format delimited
        fields terminated by '|';

Modified: hive/branches/maven/ql/src/test/queries/clientpositive/windowing_rank.q
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/queries/clientpositive/windowing_rank.q?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/queries/clientpositive/windowing_rank.q (original)
+++ hive/branches/maven/ql/src/test/queries/clientpositive/windowing_rank.q Mon Oct 28 18:44:01 2013
@@ -10,7 +10,7 @@ create table over10k(
            bo boolean,
            s string,
 	   ts timestamp, 
-           dec decimal,  
+           dec decimal(4,2),  
            bin binary)
        row format delimited
        fields terminated by '|';

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_1.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(float)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToInteger with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_2.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToByte with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_3.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToShort with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_4.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToLong with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_5.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/invalid_cast_from_binary_6.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: create table tbl (a binary)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@tbl
-FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: SemanticException Line 0:-1 Wrong arguments 'a': No matching method for class org.apache.hadoop.hive.ql.udf.UDFToDouble with (binary). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(float)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientnegative/wrong_column_type.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientnegative/wrong_column_type.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientnegative/wrong_column_type.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientnegative/wrong_column_type.q.out Mon Oct 28 18:44:01 2013
@@ -3,4 +3,4 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE dest1(a float)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@dest1
-FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array<double>). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal)  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  
+FAILED: NoMatchingMethodException No matching method for class org.apache.hadoop.hive.ql.udf.UDFToFloat with (array<double>). Possible choices: _FUNC_(bigint)  _FUNC_(boolean)  _FUNC_(decimal(65,30))  _FUNC_(double)  _FUNC_(int)  _FUNC_(smallint)  _FUNC_(string)  _FUNC_(timestamp)  _FUNC_(tinyint)  _FUNC_(void)  

Modified: hive/branches/maven/ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientpositive/auto_join18_multi_distinct.q.out Mon Oct 28 18:44:01 2013
@@ -85,7 +85,7 @@ STAGE PLANS:
                 expr: count(DISTINCT KEY._col1:1._col0)
           bucketGroup: false
           keys:
-                expr: KEY._col1:1._col0
+                expr: KEY._col0
                 type: string
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2
@@ -306,4 +306,4 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Input: default@src1
 #### A masked pattern was here ####
-14748607855
+40694575227

Modified: hive/branches/maven/ql/src/test/results/clientpositive/count.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientpositive/count.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientpositive/count.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientpositive/count.q.out Mon Oct 28 18:44:01 2013
@@ -491,10 +491,10 @@ STAGE PLANS:
           aggregations:
                 expr: count(1)
                 expr: count()
-                expr: count(KEY._col0:14._col0)
-                expr: count(KEY._col0:14._col1)
-                expr: count(KEY._col0:14._col2)
-                expr: count(KEY._col0:14._col3)
+                expr: count(KEY._col0:0._col0)
+                expr: count(KEY._col0:1._col0)
+                expr: count(KEY._col0:2._col0)
+                expr: count(KEY._col0:3._col0)
                 expr: count(DISTINCT KEY._col0:0._col0)
                 expr: count(DISTINCT KEY._col0:1._col0)
                 expr: count(DISTINCT KEY._col0:2._col0)

Modified: hive/branches/maven/ql/src/test/results/clientpositive/decimal_1.q.out
URL: http://svn.apache.org/viewvc/hive/branches/maven/ql/src/test/results/clientpositive/decimal_1.q.out?rev=1536480&r1=1536479&r2=1536480&view=diff
==============================================================================
--- hive/branches/maven/ql/src/test/results/clientpositive/decimal_1.q.out (original)
+++ hive/branches/maven/ql/src/test/results/clientpositive/decimal_1.q.out Mon Oct 28 18:44:01 2013
@@ -2,9 +2,9 @@ PREHOOK: query: drop table decimal_1
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: drop table decimal_1
 POSTHOOK: type: DROPTABLE
-PREHOOK: query: create table decimal_1 (t decimal)
+PREHOOK: query: create table decimal_1 (t decimal(4,2))
 PREHOOK: type: CREATETABLE
-POSTHOOK: query: create table decimal_1 (t decimal)
+POSTHOOK: query: create table decimal_1 (t decimal(4,2))
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@decimal_1
 PREHOOK: query: alter table decimal_1 set serde 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
@@ -16,12 +16,12 @@ POSTHOOK: type: ALTERTABLE_SERIALIZER
 POSTHOOK: Input: default@decimal_1
 POSTHOOK: Output: default@decimal_1
 PREHOOK: query: insert overwrite table decimal_1
-  select cast('17.29' as decimal) from src limit 1
+  select cast('17.29' as decimal(4,2)) from src limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Output: default@decimal_1
 POSTHOOK: query: insert overwrite table decimal_1
-  select cast('17.29' as decimal) from src limit 1
+  select cast('17.29' as decimal(4,2)) from src limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 POSTHOOK: Output: default@decimal_1