You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jd...@apache.org on 2014/03/15 00:52:34 UTC
svn commit: r1577763 [1/3] - in /hive/trunk:
common/src/java/org/apache/hadoop/hive/conf/
common/src/java/org/apache/hive/common/ conf/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/ ql/src/test/org/apach...
Author: jdere
Date: Fri Mar 14 23:52:33 2014
New Revision: 1577763
URL: http://svn.apache.org/r1577763
Log:
HIVE-6012: restore backward compatibility of arithmetic operations (reviewed by Gunther/Sergey)
Added:
hive/trunk/common/src/java/org/apache/hive/common/HiveCompat.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java
hive/trunk/ql/src/test/queries/clientpositive/ansi_sql_arithmetic.q
hive/trunk/ql/src/test/results/clientpositive/ansi_sql_arithmetic.q.out
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/conf/hive-default.xml.template
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java
hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out
hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out
hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out
hive/trunk/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
hive/trunk/ql/src/test/results/clientpositive/windowing_expressions.q.out
Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri Mar 14 23:52:33 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.shims.Shim
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
+import org.apache.hive.common.HiveCompat;
import javax.security.auth.login.LoginException;
import java.io.*;
@@ -970,7 +971,13 @@ public class HiveConf extends Configurat
// column: implies column names can contain any character.
HIVE_QUOTEDID_SUPPORT("hive.support.quoted.identifiers", "column",
new PatternValidator("none", "column")),
- USERS_IN_ADMIN_ROLE("hive.users.in.admin.role","")
+ USERS_IN_ADMIN_ROLE("hive.users.in.admin.role",""),
+
+ // Enable (configurable) deprecated behaviors by setting desired level of backward compatibility
+ // Setting to 0.12:
+ // Maintains division behavior: int / int => double
+ // Setting to 0.13:
+ HIVE_COMPAT("hive.compat", HiveCompat.DEFAULT_COMPAT_LEVEL)
;
public final String varname;
Added: hive/trunk/common/src/java/org/apache/hive/common/HiveCompat.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hive/common/HiveCompat.java?rev=1577763&view=auto
==============================================================================
--- hive/trunk/common/src/java/org/apache/hive/common/HiveCompat.java (added)
+++ hive/trunk/common/src/java/org/apache/hive/common/HiveCompat.java Fri Mar 14 23:52:33 2014
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.common;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class HiveCompat {
+
+ private static Log LOG = LogFactory.getLog(HiveCompat.class);
+
+ /**
+ * Enum to represent a level of backward compatibility support.
+ *
+ */
+ public enum CompatLevel {
+ HIVE_0_12("0.12", 0, 12),
+ HIVE_0_13("0.13", 0, 13);
+
+ public final String value;
+ public final int majorVersion;
+ public final int minorVersion;
+
+ CompatLevel(String val, int majorVersion, int minorVersion) {
+ this.value = val;
+ this.majorVersion = majorVersion;
+ this.minorVersion = minorVersion;
+ }
+ }
+
+ public static final String DEFAULT_COMPAT_LEVEL = CompatLevel.HIVE_0_12.value;
+ public static final String LATEST_COMPAT_LEVEL = getLastCompatLevel().value;
+
+ /**
+ * Returns the configured compatibility level
+ * @param hconf Hive configuration
+ * @return
+ */
+ public static CompatLevel getCompatLevel(HiveConf hconf) {
+ return getCompatLevel(HiveConf.getVar(hconf, HiveConf.ConfVars.HIVE_COMPAT));
+ }
+
+ public static CompatLevel getCompatLevel(String compatStr) {
+ if (compatStr.equalsIgnoreCase("latest")) {
+ compatStr = LATEST_COMPAT_LEVEL;
+ }
+
+ for (CompatLevel cl : CompatLevel.values()) {
+ if (cl.value.equals(compatStr)) {
+ return cl;
+ }
+ }
+
+ LOG.error("Could not find CompatLevel for " + compatStr
+ + ", using default of " + DEFAULT_COMPAT_LEVEL);
+ return getCompatLevel(DEFAULT_COMPAT_LEVEL);
+ }
+
+ private static CompatLevel getLastCompatLevel() {
+ CompatLevel[] compatLevels = CompatLevel.values();
+ return compatLevels[compatLevels.length - 1];
+ }
+}
Modified: hive/trunk/conf/hive-default.xml.template
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Fri Mar 14 23:52:33 2014
@@ -2455,4 +2455,12 @@
</description>
</property>
+<property>
+ <name>hive.compat</name>
+ <value>0.12</value>
+ <description>
+ Enable (configurable) deprecated behaviors by setting desired level of backward compatibility
+ </description>
+</property>
+
</configuration>
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Fri Mar 14 23:52:33 2014
@@ -1490,6 +1490,14 @@ public final class FunctionRegistry {
}
if (clonedUDF != null) {
+ // Copy info that may be required in the new copy.
+ // The SettableUDF calls below could be replaced using this mechanism as well.
+ try {
+ genericUDF.copyToNewInstance(clonedUDF);
+ } catch (UDFArgumentException err) {
+ throw new IllegalArgumentException(err);
+ }
+
// The original may have settable info that needs to be added to the new copy.
if (genericUDF instanceof SettableUDF) {
try {
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDF.java Fri Mar 14 23:52:33 2014
@@ -198,4 +198,16 @@ public abstract class GenericUDF impleme
public String getUdfName() {
return getClass().getName();
}
+
+ /**
+ * Some information may be set during initialize() which needs to be saved when the UDF is copied.
+ * This will be called by FunctionRegistry.cloneGenericUDF()
+ */
+ public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
+ // newInstance should always be the same type of object as this
+ if (this.getClass() != newInstance.getClass()) {
+ throw new UDFArgumentException("Invalid copy between " + this.getClass().getName()
+ + " and " + newInstance.getClass().getName());
+ }
+ }
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java Fri Mar 14 23:52:33 2014
@@ -22,12 +22,14 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.NoMatchingMethodException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,6 +42,7 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -48,6 +51,8 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
+import org.apache.hive.common.HiveCompat;
+import org.apache.hive.common.HiveCompat.CompatLevel;
/**
* GenericUDF Base Class for operations.
@@ -72,6 +77,9 @@ public abstract class GenericUDFBaseNume
protected DoubleWritable doubleWritable = new DoubleWritable();
protected HiveDecimalWritable decimalWritable = new HiveDecimalWritable();
+ protected boolean confLookupNeeded = true;
+ protected boolean ansiSqlArithmetic = false;
+
public GenericUDFBaseNumeric() {
opName = getClass().getSimpleName();
}
@@ -93,6 +101,16 @@ public abstract class GenericUDFBaseNume
}
}
+ // During map/reduce tasks, there may not be a valid HiveConf from the SessionState.
+ // So lookup and save any needed conf information during query compilation in the Hive conf
+ // (where there should be valid HiveConf from SessionState). Plan serialization will ensure
+ // we have access to these values in the map/reduce tasks.
+ if (confLookupNeeded) {
+ CompatLevel compatLevel = HiveCompat.getCompatLevel(SessionState.get().getConf());
+ ansiSqlArithmetic = compatLevel.ordinal() > CompatLevel.HIVE_0_12.ordinal();
+ confLookupNeeded = false;
+ }
+
leftOI = (PrimitiveObjectInspector) arguments[0];
rightOI = (PrimitiveObjectInspector) arguments[1];
resultOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
@@ -203,13 +221,44 @@ public abstract class GenericUDFBaseNume
// If any of the type isn't exact, double is chosen.
if (!FunctionRegistry.isExactNumericType(left) || !FunctionRegistry.isExactNumericType(right)) {
- return TypeInfoFactory.doubleTypeInfo;
+ return deriveResultApproxTypeInfo();
}
return deriveResultExactTypeInfo();
}
/**
+ * Default implementation for getting the approximate type info for the operator result.
+ * Divide operator overrides this.
+ * @return
+ */
+ protected PrimitiveTypeInfo deriveResultApproxTypeInfo() {
+ PrimitiveTypeInfo left = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(leftOI);
+ PrimitiveTypeInfo right = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(rightOI);
+
+ // string types get converted to double
+ if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(left.getPrimitiveCategory())
+ == PrimitiveGrouping.STRING_GROUP) {
+ left = TypeInfoFactory.doubleTypeInfo;
+ }
+ if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(right.getPrimitiveCategory())
+ == PrimitiveGrouping.STRING_GROUP) {
+ right = TypeInfoFactory.doubleTypeInfo;
+ }
+
+ // Use type promotion
+ PrimitiveCategory commonCat = FunctionRegistry.getCommonCategory(left, right);
+ if (commonCat == PrimitiveCategory.DECIMAL) {
+ // Hive 0.12 behavior where double * decimal -> decimal is gone.
+ return TypeInfoFactory.doubleTypeInfo;
+ } else if (commonCat == null) {
+ return TypeInfoFactory.doubleTypeInfo;
+ } else {
+ return left.getPrimitiveCategory() == commonCat ? left : right;
+ }
+ }
+
+ /**
* Default implementation for getting the exact type info for the operator result. It works for all
but the divide operator.
*
@@ -247,4 +296,10 @@ public abstract class GenericUDFBaseNume
return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
}
+ public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
+ super.copyToNewInstance(newInstance);
+ GenericUDFBaseNumeric other = (GenericUDFBaseNumeric) newInstance;
+ other.confLookupNeeded = this.confLookupNeeded;
+ other.ansiSqlArithmetic = this.ansiSqlArithmetic;
+ }
}
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPDivide.java Fri Mar 14 23:52:33 2014
@@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColDivideLongColumn;
@@ -27,9 +28,11 @@ import org.apache.hadoop.hive.ql.exec.ve
import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
/**
* Note that in SQL, the return type of divide is not necessarily the same
@@ -55,10 +58,34 @@ public class GenericUDFOPDivide extends
@Override
protected PrimitiveTypeInfo deriveResultExactTypeInfo() {
+ if (ansiSqlArithmetic) {
+ return deriveResultExactTypeInfoAnsiSql();
+ }
+ return deriveResultExactTypeInfoBackwardsCompat();
+ }
+
+ protected PrimitiveTypeInfo deriveResultExactTypeInfoAnsiSql() {
// No type promotion. Everything goes to decimal.
return deriveResultDecimalTypeInfo();
}
+ protected PrimitiveTypeInfo deriveResultExactTypeInfoBackwardsCompat() {
+ // Preserve existing return type behavior for division:
+ // Non-decimal division should return double
+ if (leftOI.getPrimitiveCategory() != PrimitiveCategory.DECIMAL
+ && rightOI.getPrimitiveCategory() != PrimitiveCategory.DECIMAL) {
+ return TypeInfoFactory.doubleTypeInfo;
+ }
+
+ return deriveResultDecimalTypeInfo();
+ }
+
+ @Override
+ protected PrimitiveTypeInfo deriveResultApproxTypeInfo() {
+ // Hive 0.12 behavior where double / decimal -> decimal is gone.
+ return TypeInfoFactory.doubleTypeInfo;
+ }
+
@Override
protected DoubleWritable evaluate(DoubleWritable left, DoubleWritable right) {
if (right.get() == 0.0) {
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExpressionEvaluator.java Fri Mar 14 23:52:33 2014
@@ -22,12 +22,14 @@ import java.util.ArrayList;
import junit.framework.TestCase;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -58,6 +60,10 @@ public class TestExpressionEvaluator ext
TypeInfo dataType;
public TestExpressionEvaluator() {
+ // Arithmetic operations rely on getting conf from SessionState, need to initialize here.
+ SessionState ss = new SessionState(new HiveConf());
+ SessionState.setCurrentSessionState(ss);
+
col1 = new ArrayList<Text>();
col1.add(new Text("0"));
col1.add(new Text("1"));
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPDivide.java Fri Mar 14 23:52:33 2014
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -30,6 +32,7 @@ import org.apache.hadoop.hive.serde2.io.
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
@@ -37,7 +40,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFOPDivide {
+public class TestGenericUDFOPDivide extends TestGenericUDFOPNumeric {
@Test
public void testByteDivideShort() throws HiveException {
@@ -248,4 +251,45 @@ public class TestGenericUDFOPDivide {
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(prec3, scale3), oi.getTypeInfo());
}
+ @Test
+ public void testReturnTypeBackwardCompat() throws Exception {
+ // Disable ansi sql arithmetic changes
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "0.12");
+
+ verifyReturnType(new GenericUDFOPDivide(), "int", "int", "double"); // different from sql compat mode
+ verifyReturnType(new GenericUDFOPDivide(), "int", "float", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)");
+
+ verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)");
+
+ // Most tests are done with ANSI SQL mode enabled, set it back to true
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+ }
+
+ @Test
+ public void testReturnTypeAnsiSql() throws Exception {
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+
+ verifyReturnType(new GenericUDFOPDivide(), "int", "int", "decimal(21,11)");
+ verifyReturnType(new GenericUDFOPDivide(), "int", "float", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "int", "decimal(10,2)", "decimal(23,11)");
+
+ verifyReturnType(new GenericUDFOPDivide(), "float", "float", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPDivide(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPDivide(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPDivide(), "decimal(10,2)", "decimal(10,2)", "decimal(23,13)");
+ }
}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMinus.java Fri Mar 14 23:52:33 2014
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -37,7 +39,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFOPMinus {
+public class TestGenericUDFOPMinus extends TestGenericUDFOPNumeric {
@Test
public void testByteMinusShort() throws HiveException {
@@ -141,9 +143,9 @@ public class TestGenericUDFOPMinus {
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
- Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo);
- DoubleWritable res = (DoubleWritable) udf.evaluate(args);
- Assert.assertEquals(new Double(4.5), new Double(res.get()));
+ Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo);
+ FloatWritable res = (FloatWritable) udf.evaluate(args);
+ Assert.assertEquals(new Float(4.5), new Float(res.get()));
}
@Test
@@ -201,4 +203,45 @@ public class TestGenericUDFOPMinus {
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), oi.getTypeInfo());
}
+ @Test
+ public void testReturnTypeBackwardCompat() throws Exception {
+ // Disable ansi sql arithmetic changes
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "0.12");
+
+ verifyReturnType(new GenericUDFOPMinus(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "decimal(10,2)", "decimal(13,2)");
+
+ verifyReturnType(new GenericUDFOPMinus(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPMinus(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMinus(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMinus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
+
+ // Most tests are done with ANSI SQL mode enabled, set it back to true
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+ }
+
+ @Test
+ public void testReturnTypeAnsiSql() throws Exception {
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+
+ verifyReturnType(new GenericUDFOPMinus(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "int", "decimal(10,2)", "decimal(13,2)");
+
+ verifyReturnType(new GenericUDFOPMinus(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPMinus(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMinus(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPMinus(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMinus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
+ }
}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMod.java Fri Mar 14 23:52:33 2014
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFOPMod {
+public class TestGenericUDFOPMod extends TestGenericUDFOPNumeric {
@Test
public void testModByZero1() throws HiveException {
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPMultiply.java Fri Mar 14 23:52:33 2014
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -37,7 +39,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFOPMultiply {
+public class TestGenericUDFOPMultiply extends TestGenericUDFOPNumeric {
@Test
public void testByteTimesShort() throws HiveException {
@@ -140,9 +142,9 @@ public class TestGenericUDFOPMultiply {
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
- Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo);
- DoubleWritable res = (DoubleWritable) udf.evaluate(args);
- Assert.assertEquals(new Double(0.0), new Double(res.get()));
+ Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo);
+ FloatWritable res = (FloatWritable) udf.evaluate(args);
+ Assert.assertEquals(new Float(0.0), new Float(res.get()));
}
@Test
@@ -200,4 +202,46 @@ public class TestGenericUDFOPMultiply {
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(11, 4), oi.getTypeInfo());
}
+
+ @Test
+ public void testReturnTypeBackwardCompat() throws Exception {
+ // Disable ansi sql arithmetic changes
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "0.12");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "decimal(10,2)", "decimal(21,2)");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "decimal(10,2)", "decimal(10,2)", "decimal(21,4)");
+
+ // Most tests are done with ANSI SQL mode enabled, set it back to true
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+ }
+
+ @Test
+ public void testReturnTypeAnsiSql() throws Exception {
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "int", "decimal(10,2)", "decimal(21,2)");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPMultiply(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPMultiply(), "decimal(10,2)", "decimal(10,2)", "decimal(21,4)");
+ }
}
Added: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java?rev=1577763&view=auto
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java (added)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNumeric.java Fri Mar 14 23:52:33 2014
@@ -0,0 +1,39 @@
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hive.common.HiveCompat;
+import org.junit.Assert;
+
+public abstract class TestGenericUDFOPNumeric {
+ public TestGenericUDFOPNumeric() {
+ // Arithmetic operations rely on getting conf from SessionState, need to initialize here.
+ SessionState ss = new SessionState(new HiveConf());
+ ss.getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+ SessionState.setCurrentSessionState(ss);
+ }
+
+ protected void verifyReturnType(GenericUDF udf,
+ String typeStr1, String typeStr2, String expectedTypeStr) throws HiveException {
+ // Lookup type infos for our input types and expected return type
+ PrimitiveTypeInfo type1 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr1);
+ PrimitiveTypeInfo type2 = TypeInfoFactory.getPrimitiveTypeInfo(typeStr2);
+ PrimitiveTypeInfo expectedType = TypeInfoFactory.getPrimitiveTypeInfo(expectedTypeStr);
+
+ // Initialize UDF which will output the return type for the UDF.
+ ObjectInspector[] inputOIs = {
+ PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type1),
+ PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(type2)
+ };
+ PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
+
+ Assert.assertEquals("Return type for " + udf.getDisplayString(new String[] {typeStr1, typeStr2}),
+ expectedType, oi.getTypeInfo());
+ }
+}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPPlus.java Fri Mar 14 23:52:33 2014
@@ -19,7 +19,9 @@
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
@@ -37,7 +39,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFOPPlus {
+public class TestGenericUDFOPPlus extends TestGenericUDFOPNumeric {
@Test
public void testBytePlusShort() throws HiveException {
@@ -145,9 +147,9 @@ public class TestGenericUDFOPPlus {
};
PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
- Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.doubleTypeInfo);
- DoubleWritable res = (DoubleWritable) udf.evaluate(args);
- Assert.assertEquals(new Double(4.5), new Double(res.get()));
+ Assert.assertEquals(oi.getTypeInfo(), TypeInfoFactory.floatTypeInfo);
+ FloatWritable res = (FloatWritable) udf.evaluate(args);
+ Assert.assertEquals(new Float(4.5), new Float(res.get()));
}
@Test
@@ -207,4 +209,45 @@ public class TestGenericUDFOPPlus {
Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), oi.getTypeInfo());
}
+ @Test
+ public void testReturnTypeBackwardCompat() throws Exception {
+ // Disable ansi sql arithmetic changes
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "0.12");
+
+ verifyReturnType(new GenericUDFOPPlus(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "decimal(10,2)", "decimal(13,2)");
+
+ verifyReturnType(new GenericUDFOPPlus(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPPlus(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPPlus(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
+
+ // Most tests are done with ANSI SQL mode enabled; set it back to "latest"
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+ }
+
+ @Test
+ public void testReturnTypeAnsiSql() throws Exception {
+ SessionState.get().getConf().setVar(HiveConf.ConfVars.HIVE_COMPAT, "latest");
+
+ verifyReturnType(new GenericUDFOPPlus(), "int", "int", "int");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "float", "float");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "int", "decimal(10,2)", "decimal(13,2)");
+
+ verifyReturnType(new GenericUDFOPPlus(), "float", "float", "float");
+ verifyReturnType(new GenericUDFOPPlus(), "float", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "float", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPPlus(), "double", "double", "double");
+ verifyReturnType(new GenericUDFOPPlus(), "double", "decimal(10,2)", "double");
+
+ verifyReturnType(new GenericUDFOPPlus(), "decimal(10,2)", "decimal(10,2)", "decimal(11,2)");
+ }
}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFPosMod.java Fri Mar 14 23:52:33 2014
@@ -36,7 +36,7 @@ import org.apache.hadoop.io.LongWritable
import org.junit.Assert;
import org.junit.Test;
-public class TestGenericUDFPosMod {
+public class TestGenericUDFPosMod extends TestGenericUDFOPNumeric {
@Test
public void testPosModByZero1() throws HiveException {
Added: hive/trunk/ql/src/test/queries/clientpositive/ansi_sql_arithmetic.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/ansi_sql_arithmetic.q?rev=1577763&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/ansi_sql_arithmetic.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/ansi_sql_arithmetic.q Fri Mar 14 23:52:33 2014
@@ -0,0 +1,13 @@
+
+set hive.compat=latest;
+
+-- With ansi sql arithmetic enabled, int / int => exact numeric type
+explain select cast(key as int) / cast(key as int) from src limit 1;
+select cast(key as int) / cast(key as int) from src limit 1;
+
+
+set hive.compat=0.12;
+
+-- With ansi sql arithmetic disabled, int / int => double
+explain select cast(key as int) / cast(key as int) from src limit 1;
+select cast(key as int) / cast(key as int) from src limit 1;
Added: hive/trunk/ql/src/test/results/clientpositive/ansi_sql_arithmetic.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ansi_sql_arithmetic.q.out?rev=1577763&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ansi_sql_arithmetic.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/ansi_sql_arithmetic.q.out Fri Mar 14 23:52:33 2014
@@ -0,0 +1,90 @@
+PREHOOK: query: -- With ansi sql arithmetic enabled, int / int => exact numeric type
+explain select cast(key as int) / cast(key as int) from src limit 1
+PREHOOK: type: QUERY
+POSTHOOK: query: -- With ansi sql arithmetic enabled, int / int => exact numeric type
+explain select cast(key as int) / cast(key as int) from src limit 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: (UDFToInteger(key) / UDFToInteger(key)) (type: decimal(21,11))
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 1
+ Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 1
+
+PREHOOK: query: select cast(key as int) / cast(key as int) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(key as int) / cast(key as int) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1
+PREHOOK: query: -- With ansi sql arithmetic disabled, int / int => double
+explain select cast(key as int) / cast(key as int) from src limit 1
+PREHOOK: type: QUERY
+POSTHOOK: query: -- With ansi sql arithmetic disabled, int / int => double
+explain select cast(key as int) / cast(key as int) from src limit 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: (UDFToInteger(key) / UDFToInteger(key)) (type: double)
+ outputColumnNames: _col0
+ Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Limit
+ Number of rows: 1
+ Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 1
+
+PREHOOK: query: select cast(key as int) / cast(key as int) from src limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select cast(key as int) / cast(key as int) from src limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+1.0
Modified: hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/decimal_udf.q.out Fri Mar 14 23:52:33 2014
@@ -175,7 +175,7 @@ STAGE PLANS:
alias: decimal_udf
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (key + (value / 2)) (type: decimal(21,10))
+ expressions: (key + (value / 2)) (type: double)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
ListSink
@@ -188,19 +188,19 @@ POSTHOOK: query: SELECT key + (value/2)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_udf
#### A masked pattern was here ####
--2200
+-2200.0
NULL
-0
-0
-150
-15
+0.0
+0.0
+150.0
+15.0
1.5
0.1
0.01
-300
-30
-3
-0
+300.0
+30.0
+3.0
+0.0
0.2
0.02
0.3
@@ -210,22 +210,22 @@ NULL
-0.33
-0.333
1.5
-3
-4.64
+3.0
+4.640000000000001
-1.62
-1.62
-6.622
1.62
1.622
-186
+186.0
187.7
-1882.99
-4.64
-4.64
-5.14
+4.640000000000001
+4.640000000000001
+5.140000000000001
1.5
--1851851835.123456789
-1851851835.12345678
+-1.8518518351234567E9
+1.8518518351234567E9
PREHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT key + '1.0' FROM DECIMAL_UDF
@@ -445,7 +445,7 @@ STAGE PLANS:
alias: decimal_udf
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (key - (value / 2)) (type: decimal(21,10))
+ expressions: (key - (value / 2)) (type: double)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
ListSink
@@ -458,19 +458,19 @@ POSTHOOK: query: SELECT key - (value/2)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_udf
#### A masked pattern was here ####
--6600
+-6600.0
NULL
-0
-0
-50
-5
+0.0
+0.0
+50.0
+5.0
0.5
0.1
0.01
-100
-10
-1
-0
+100.0
+10.0
+1.0
+0.0
0.2
0.02
0.3
@@ -480,22 +480,22 @@ NULL
-0.33
-0.333
0.5
-1
-1.64
--0.62
--0.62
+1.0
+1.6400000000000001
+-0.6200000000000001
+-0.6200000000000001
4.378
-0.62
-0.622
-62
+0.6200000000000001
+0.6220000000000001
+62.0
62.7
-627.99
-1.64
-1.64
-1.14
+1.6400000000000001
+1.6400000000000001
+1.1400000000000001
0.5
--617283945.123456789
-617283945.12345678
+-6.172839451234567E8
+6.172839451234567E8
PREHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT key - '1.0' FROM DECIMAL_UDF
@@ -715,7 +715,7 @@ STAGE PLANS:
alias: decimal_udf
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (key * (value / 2)) (type: decimal(37,16))
+ expressions: (key * (value / 2)) (type: double)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 359 Basic stats: COMPLETE Column stats: NONE
ListSink
@@ -728,44 +728,44 @@ POSTHOOK: query: SELECT key * (value/2)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_udf
#### A masked pattern was here ####
--9680000
+-9680000.0
NULL
-0
-0
-5000
-50
+0.0
+0.0
+5000.0
+50.0
0.5
-0
-0
-20000
-200
-2
-0
-0
-0
-0
-0
-0
-0
-0
-0
+0.0
+0.0
+20000.0
+200.0
+2.0
+0.0
+0.0
+0.0
+0.0
+0.0
+0.0
+-0.0
+-0.0
+-0.0
0.5
-2
+2.0
4.71
0.56
0.56
6.171
0.56
0.561
-7688
-7825
+7688.0
+7825.0
787819.975
4.71
4.71
6.28
0.5
-762078937585733943.750952605
-762078937585733938.1953971
+7.6207893758573389E17
+7.6207893758573389E17
PREHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT key * '2.0' FROM DECIMAL_UDF
@@ -1042,7 +1042,7 @@ STAGE PLANS:
predicate: (value is not null and (value <> 0)) (type: boolean)
Statistics: Num rows: 2 Data size: 239 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (key / (value / 2)) (type: decimal(38,24))
+ expressions: (key / (value / 2)) (type: double)
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 239 Basic stats: COMPLETE Column stats: NONE
ListSink
@@ -1055,30 +1055,30 @@ POSTHOOK: query: SELECT key / (value/2)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@decimal_udf
#### A masked pattern was here ####
--2
-2
-2
-2
-2
-2
-2
-2
-2
-2.093333333333333333333333
+-2.0
+2.0
+2.0
+2.0
+2.0
+2.0
+2.0
+2.0
+2.0
+2.0933333333333333
2.24
2.24
-0.204
+0.20400000000000001
2.24
2.244
-2
+2.0
2.0032
-2.00078087649402390438247
-2.093333333333333333333333
-2.093333333333333333333333
+2.000780876494024
+2.0933333333333333
+2.0933333333333333
1.57
-2
+2.0
+2.0000000002
2.0000000002
-2.00000000019999998542
PREHOOK: query: EXPLAIN SELECT key / '2.0' FROM DECIMAL_UDF
PREHOOK: type: QUERY
POSTHOOK: query: EXPLAIN SELECT key / '2.0' FROM DECIMAL_UDF
Modified: hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/ql_rewrite_gbtoidx.q.out Fri Mar 14 23:52:33 2014
@@ -846,7 +846,7 @@ STAGE PLANS:
outputColumnNames: _col1, _col2, _col4, _col5
Statistics: Num rows: 25 Data size: 2308 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col1 (type: int), _col4 (type: int), ((_col5 - _col2) / _col2) (type: decimal(38,19))
+ expressions: _col1 (type: int), _col4 (type: int), ((_col5 - _col2) / _col2) (type: double)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 25 Data size: 2308 Basic stats: COMPLETE Column stats: NONE
File Output Operator
Modified: hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_pmod.q.out Fri Mar 14 23:52:33 2014
@@ -76,7 +76,7 @@ POSTHOOK: query: SELECT pmod(CAST(-100.9
POSTHOOK: type: QUERY
POSTHOOK: Input: default@src
#### A masked pattern was here ####
-6.889998435974121 51.70000457763672 18.089996337890625
+6.8899984 51.700005 18.089996
PREHOOK: query: SELECT pmod(CAST(-100.91 AS DOUBLE),CAST(9.8 AS DOUBLE)), pmod(CAST(-50.1 AS DOUBLE),CAST(101.8 AS DOUBLE)), pmod(CAST(-100.91 AS DOUBLE),CAST(29.75 AS DOUBLE)) FROM src tablesample (1 rows)
PREHOOK: type: QUERY
PREHOOK: Input: default@src
Modified: hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/vectorization_15.q.out Fri Mar 14 23:52:33 2014
@@ -77,32 +77,32 @@ NULL true 10419.0 10 NULL -721614386 NUL
NULL true 14519.0 100xJdkyc NULL 729277608 NULL NULL -7.2927763428E8 14519.0 1155030.007 NULL NULL NULL -23.0 NULL 0.0 NULL NULL 7.2927763428E8 0.0
-62.0 NULL 15601.0 NULL -62 NULL 1969-12-31 16:00:09.889 0.0 NULL 15601.0 1241106.353 33.0 0.0 0.0 -23.0 62 NULL NULL -23 NULL NULL
-51.0 NULL -200.0 NULL -51 NULL 1969-12-31 15:59:55.423 0.0 NULL -200.0 -15910.599999999999 33.0 0.0 0.0 -23.0 51 NULL NULL -23 NULL NULL
--51.0 false NULL 10 -51 1058319346 1969-12-31 16:00:08.451 0.0 -1.05831937228E9 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 1.058319397E9 -23 1.05831937228E9 0.0
--51.0 false NULL 10TYIE5S35U6dj3N -51 -469581869 1969-12-31 16:00:08.451 0.0 4.6958184272E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -4.69581818E8 -23 -4.6958184272E8 0.0
--51.0 false NULL 1Lh6Uoq3WhNtOqQHu7WN7U -51 -352637533 1969-12-31 16:00:08.451 0.0 3.5263750672E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.52637482E8 -23 -3.5263750672E8 0.0
--51.0 true NULL 04Y1mA17 -51 -114647521 1969-12-31 16:00:08.451 0.0 1.1464749472E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -1.1464747E8 -23 -1.1464749472E8 0.0
+-51.0 false NULL 10 -51 1058319346 1969-12-31 16:00:08.451 0.0 -1.05831937228E9 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 1.05831942E9 -23 1.05831937228E9 0.0
+-51.0 false NULL 10TYIE5S35U6dj3N -51 -469581869 1969-12-31 16:00:08.451 0.0 4.6958184272E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -4.69581792E8 -23 -4.6958184272E8 0.0
+-51.0 false NULL 1Lh6Uoq3WhNtOqQHu7WN7U -51 -352637533 1969-12-31 16:00:08.451 0.0 3.5263750672E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.52637472E8 -23 -3.5263750672E8 0.0
+-51.0 true NULL 04Y1mA17 -51 -114647521 1969-12-31 16:00:08.451 0.0 1.1464749472E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -1.14647472E8 -23 -1.1464749472E8 0.0
-51.0 true NULL 10Wu570aLPO0p02P17FeH -51 405338893 1969-12-31 16:00:08.451 0.0 -4.0533891928E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 4.05338944E8 -23 4.0533891928E8 0.0
-51.0 true NULL 3cQp060 -51 -226923315 1969-12-31 16:00:08.451 0.0 2.2692328872E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -2.26923264E8 -23 -2.2692328872E8 0.0
--51.0 true NULL 8EPG0Xi307qd -51 -328662044 1969-12-31 16:00:08.451 0.0 3.2866201772E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.28661993E8 -23 -3.2866201772E8 0.0
--51.0 true NULL 8iHtdkJ6d -51 1006818344 1969-12-31 16:00:08.451 0.0 -1.00681837028E9 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 1.006818395E9 -23 1.00681837028E9 0.0
--51.0 true NULL QiOcvR0kt6r7f0R7fiPxQTCU -51 266531954 1969-12-31 16:00:08.451 0.0 -2.6653198028E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 2.66532005E8 -23 2.6653198028E8 0.0
--51.0 true NULL Ybpj38RTTYl7CnJXPNx1g4C -51 -370919370 1969-12-31 16:00:08.451 0.0 3.7091934372E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.70919319E8 -23 -3.7091934372E8 0.0
+-51.0 true NULL 8EPG0Xi307qd -51 -328662044 1969-12-31 16:00:08.451 0.0 3.2866201772E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.28661984E8 -23 -3.2866201772E8 0.0
+-51.0 true NULL 8iHtdkJ6d -51 1006818344 1969-12-31 16:00:08.451 0.0 -1.00681837028E9 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 1.00681843E9 -23 1.00681837028E9 0.0
+-51.0 true NULL QiOcvR0kt6r7f0R7fiPxQTCU -51 266531954 1969-12-31 16:00:08.451 0.0 -2.6653198028E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 2.66532E8 -23 2.6653198028E8 0.0
+-51.0 true NULL Ybpj38RTTYl7CnJXPNx1g4C -51 -370919370 1969-12-31 16:00:08.451 0.0 3.7091934372E8 NULL NULL 33.0 0.0 0.0 NULL 51 0.0 -3.70919296E8 -23 -3.7091934372E8 0.0
-48.0 NULL -7196.0 NULL -48 NULL 1969-12-31 16:00:06.337 0.0 NULL -7196.0 -572463.388 33.0 0.0 0.0 -23.0 48 NULL NULL -23 NULL NULL
-6.0 NULL -200.0 NULL -6 NULL 1969-12-31 15:59:56.094 0.0 NULL -200.0 -15910.599999999999 3.0 0.0 0.0 -23.0 6 NULL NULL -5 NULL NULL
5.0 NULL 15601.0 NULL 5 NULL 1969-12-31 16:00:00.959 0.0 NULL 15601.0 1241106.353 3.0 0.0 0.0 -23.0 -5 NULL NULL -3 NULL NULL
-8.0 false NULL 10V3pN5r5lI2qWl2lG103 8 -362835731 1969-12-31 16:00:15.892 0.0 3.6283570472E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -3.62835739E8 -7 -3.6283570472E8 0.0
-8.0 false NULL 10c4qt584m5y6uWT 8 -183000142 1969-12-31 16:00:15.892 0.0 1.8300011572E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -1.8300015E8 -7 -1.8300011572E8 0.0
+8.0 false NULL 10V3pN5r5lI2qWl2lG103 8 -362835731 1969-12-31 16:00:15.892 0.0 3.6283570472E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -3.62835744E8 -7 -3.6283570472E8 0.0
+8.0 false NULL 10c4qt584m5y6uWT 8 -183000142 1969-12-31 16:00:15.892 0.0 1.8300011572E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -1.8300016E8 -7 -1.8300011572E8 0.0
8.0 false NULL 8GloEukQ0c68JDmnYL53 8 -722873402 1969-12-31 16:00:15.892 0.0 7.2287337572E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -7.2287341E8 -7 -7.2287337572E8 0.0
8.0 false NULL kA0XH5C5 8 -503903864 1969-12-31 16:00:15.892 0.0 5.0390383772E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -5.03903872E8 -7 -5.0390383772E8 0.0
-8.0 true NULL 100VTM7PEW8GH1uE 8 88129338 1969-12-31 16:00:15.892 0.0 -8.812936428E7 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 8.812933E7 -7 8.812936428E7 0.0
-8.0 true NULL 1062158y 8 -1005155523 1969-12-31 16:00:15.892 0.0 1.00515549672E9 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -1.005155531E9 -7 -1.00515549672E9 0.0
-8.0 true NULL 1063cEnGjSal 8 -624769630 1969-12-31 16:00:15.892 0.0 6.2476960372E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -6.24769638E8 -7 -6.2476960372E8 0.0
-8.0 true NULL 4kMasVoB7lX1wc5i64bNk 8 683567667 1969-12-31 16:00:15.892 0.0 -6.8356769328E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 6.83567659E8 -7 6.8356769328E8 0.0
-8.0 true NULL XH6I7A417 8 436627202 1969-12-31 16:00:15.892 0.0 -4.3662722828E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 4.36627194E8 -7 4.3662722828E8 0.0
-11.0 false NULL 10pO8p1LNx4Y 11 271296824 1969-12-31 16:00:02.351 0.0 -2.7129685028E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 2.71296813E8 -1 2.7129685028E8 0.0
-11.0 false NULL 1H6wGP 11 -560827082 1969-12-31 16:00:02.351 0.0 5.6082705572E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 -5.60827093E8 -1 -5.6082705572E8 0.0
-11.0 false NULL 2a7V63IL7jK3o 11 -325931647 1969-12-31 16:00:02.351 0.0 3.2593162072E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 -3.25931658E8 -1 -3.2593162072E8 0.0
-11.0 true NULL 10 11 92365813 1969-12-31 16:00:02.351 0.0 -9.236583928E7 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 9.2365802E7 -1 9.236583928E7 0.0
+8.0 true NULL 100VTM7PEW8GH1uE 8 88129338 1969-12-31 16:00:15.892 0.0 -8.812936428E7 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 8.8129328E7 -7 8.812936428E7 0.0
+8.0 true NULL 1062158y 8 -1005155523 1969-12-31 16:00:15.892 0.0 1.00515549672E9 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -1.00515552E9 -7 -1.00515549672E9 0.0
+8.0 true NULL 1063cEnGjSal 8 -624769630 1969-12-31 16:00:15.892 0.0 6.2476960372E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 -6.247696E8 -7 -6.2476960372E8 0.0
+8.0 true NULL 4kMasVoB7lX1wc5i64bNk 8 683567667 1969-12-31 16:00:15.892 0.0 -6.8356769328E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 6.8356768E8 -7 6.8356769328E8 0.0
+8.0 true NULL XH6I7A417 8 436627202 1969-12-31 16:00:15.892 0.0 -4.3662722828E8 NULL NULL 1.0 0.0 0.0 NULL -8 0.0 4.366272E8 -7 4.3662722828E8 0.0
+11.0 false NULL 10pO8p1LNx4Y 11 271296824 1969-12-31 16:00:02.351 0.0 -2.7129685028E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 2.71296832E8 -1 2.7129685028E8 0.0
+11.0 false NULL 1H6wGP 11 -560827082 1969-12-31 16:00:02.351 0.0 5.6082705572E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 -5.6082707E8 -1 -5.6082705572E8 0.0
+11.0 false NULL 2a7V63IL7jK3o 11 -325931647 1969-12-31 16:00:02.351 0.0 3.2593162072E8 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 -3.25931648E8 -1 -3.2593162072E8 0.0
+11.0 true NULL 10 11 92365813 1969-12-31 16:00:02.351 0.0 -9.236583928E7 NULL NULL 0.0 0.0 0.0 NULL -11 0.0 9.2365808E7 -1 9.236583928E7 0.0
21.0 NULL 15601.0 NULL 21 NULL 1969-12-31 16:00:14.256 0.0 NULL 15601.0 1241106.353 12.0 0.0 0.0 -23.0 -21 NULL NULL -2 NULL NULL
32.0 NULL -200.0 NULL 32 NULL 1969-12-31 16:00:02.445 0.0 NULL -200.0 -15910.599999999999 1.0 0.0 0.0 -23.0 -32 NULL NULL -23 NULL NULL
36.0 NULL -200.0 NULL 36 NULL 1969-12-31 16:00:00.554 0.0 NULL -200.0 -15910.599999999999 33.0 0.0 0.0 -23.0 -36 NULL NULL -23 NULL NULL
Modified: hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out?rev=1577763&r1=1577762&r2=1577763&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/vectorization_5.q.out Fri Mar 14 23:52:33 2014
@@ -40,4 +40,4 @@ WHERE (((cboolean2 IS NOT NULL)
POSTHOOK: type: QUERY
POSTHOOK: Input: default@alltypesorc
#### A masked pattern was here ####
-16343 -1225725 1070 -1145.53738317757009345794 114090483 -16307 16307 197 -26853917571 11 -11 0
+16343 -1225725 1070 -1145.53738317757 114090483 -16307 16307 197.0 -26853917571 11 -11 0