Posted to commits@hive.apache.org by jd...@apache.org on 2016/04/04 22:37:05 UTC

[30/50] [abbrv] hive git commit: HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
index 39823fe..31d2f78 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToTimestamp.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastDoubleToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -37,9 +38,11 @@ public class CastDoubleToTimestamp extends VectorExpression {
     super();
   }
 
-  private void setSecondsWithFractionalNanoseconds(TimestampColumnVector timestampColVector,
+  private void setDouble(TimestampColumnVector timestampColVector,
       double[] vector, int elementNum) {
-    timestampColVector.setTimestampSecondsWithFractionalNanoseconds(elementNum, vector[elementNum]);
+    TimestampWritable.setTimestampFromDouble(
+        timestampColVector.getScratchTimestamp(), vector[elementNum]);
+    timestampColVector.setFromScratchTimestamp(elementNum);
   }
 
   @Override
@@ -66,7 +69,7 @@ public class CastDoubleToTimestamp extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      setSecondsWithFractionalNanoseconds(outputColVector, vector, 0);
+      setDouble(outputColVector, vector, 0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
@@ -74,11 +77,11 @@ public class CastDoubleToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
       }
       outputColVector.isRepeating = false;
@@ -86,12 +89,12 @@ public class CastDoubleToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          setSecondsWithFractionalNanoseconds(outputColVector, vector, i);
+          setDouble(outputColVector, vector, i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }
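
The hunk above replaces TimestampColumnVector.setTimestampSecondsWithFractionalNanoseconds() with a write through the column vector's scratch Timestamp: TimestampWritable.setTimestampFromDouble() fills the scratch object and setFromScratchTimestamp() copies it into the row. As a rough, standalone illustration (not the exact TimestampWritable logic), the conversion treats the double as epoch seconds with a fractional nanosecond part, per the old method name:

    import java.sql.Timestamp;

    public class DoubleToTimestampSketch {
      // Illustrative only: interpret a double as seconds since the epoch, with the
      // fraction carrying nanoseconds. Rounding details in the real
      // TimestampWritable.setTimestampFromDouble() may differ.
      static Timestamp fromSecondsDouble(double seconds) {
        long wholeSeconds = (long) Math.floor(seconds);
        int nanos = (int) Math.round((seconds - wholeSeconds) * 1_000_000_000d);
        if (nanos == 1_000_000_000) {   // rounding pushed us into the next second
          wholeSeconds++;
          nanos = 0;
        }
        Timestamp t = new Timestamp(wholeSeconds * 1000L);
        t.setNanos(nanos);
        return t;
      }

      public static void main(String[] args) {
        System.out.println(fromSecondsDouble(1459809425.5)); // half a second past 1459809425
      }
    }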

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
index d344d4d..a2ee52d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToTimestamp.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -40,7 +39,10 @@ public class CastLongToTimestamp extends VectorExpression {
   }
 
   private void setSeconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
-    timestampColVector.setTimestampSeconds(elementNum, vector[elementNum]);
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ true);
+    timestampColVector.setFromScratchTimestamp(elementNum);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
index a0c947f..01c8810 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastMillisecondsLongToTimestamp.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastMillisecondsLongToTimestamp extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -39,6 +38,13 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
     super();
   }
 
+  private void setMilliseconds(TimestampColumnVector timestampColVector, long[] vector, int elementNum) {
+    TimestampWritable.setTimestampFromLong(
+        timestampColVector.getScratchTimestamp(), vector[elementNum],
+        /* intToTimestampInSeconds */ false);
+    timestampColVector.setFromScratchTimestamp(elementNum);
+  }
+
   @Override
   public void evaluate(VectorizedRowBatch batch) {
 
@@ -63,19 +69,19 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputColVector.setTimestampMilliseconds(0, vector[0]);
+      setMilliseconds(outputColVector, vector, 0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
+      outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
     } else if (inputColVector.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
       }
       outputColVector.isRepeating = false;
@@ -83,12 +89,12 @@ public class CastMillisecondsLongToTimestamp extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.setTimestampMilliseconds(i, vector[i]);
+          setMilliseconds(outputColVector, vector, i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }
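
CastLongToTimestamp and CastMillisecondsLongToTimestamp now both go through TimestampWritable.setTimestampFromLong(), differing only in the intToTimestampInSeconds flag: true means the long holds whole seconds, false means milliseconds. A minimal standalone sketch of that distinction (illustrative, not the TimestampWritable implementation):

    import java.sql.Timestamp;

    public class LongToTimestampSketch {
      // Illustrative: build a Timestamp from a long that is either seconds or milliseconds.
      static Timestamp fromLong(long value, boolean valueIsSeconds) {
        long millis = valueIsSeconds ? value * 1000L : value;
        return new Timestamp(millis);
      }

      public static void main(String[] args) {
        System.out.println(fromLong(1459809425L, true));     // long interpreted as seconds
        System.out.println(fromLong(1459809425123L, false)); // long interpreted as milliseconds
      }
    }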

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
index a3ddf9f..c8844c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToIntervalDayTime.java
@@ -21,7 +21,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -56,7 +56,7 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     BytesColumnVector inV = (BytesColumnVector) batch.cols[inputColumn];
     int[] sel = batch.selected;
     int n = batch.size;
-    TimestampColumnVector outV = (TimestampColumnVector) batch.cols[outputColumn];
+    IntervalDayTimeColumnVector outV = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     if (n == 0) {
 
@@ -113,11 +113,11 @@ public class CastStringToIntervalDayTime extends VectorExpression {
     }
   }
 
-  private void evaluate(TimestampColumnVector outV, BytesColumnVector inV, int i) {
+  private void evaluate(IntervalDayTimeColumnVector outV, BytesColumnVector inV, int i) {
     try {
       HiveIntervalDayTime interval = HiveIntervalDayTime.valueOf(
           new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8"));
-      outV.setEpochSecondsAndSignedNanos(i, interval.getTotalSeconds(), interval.getNanos());
+      outV.set(i, interval);
     } catch (Exception e) {
       outV.setNullValue(i);
       outV.isNull[i] = true;

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
index 55b84b1..b8a58cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToBoolean.java
@@ -41,8 +41,8 @@ public class CastTimestampToBoolean extends VectorExpression {
   }
 
   private int toBool(TimestampColumnVector timestampColVector, int index) {
-    return (timestampColVector.getEpochDay(index) != 0 ||
-            timestampColVector.getNanoOfDay(index) != 0) ? 1 : 0;
+    return (timestampColVector.getTime(index) != 0 ||
+            timestampColVector.getNanos(index) != 0) ? 1 : 0;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
index 00790b9..4e3e62c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDate.java
@@ -44,6 +44,6 @@ public class CastTimestampToDate extends FuncTimestampToLong {
   @Override
   protected void func(LongColumnVector outV, TimestampColumnVector inV, int i) {
 
-    outV.vector[i] = DateWritable.millisToDays(inV.getTimestampMilliseconds(i));
+    outV.vector[i] = DateWritable.millisToDays(inV.getTime(i));
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
index aec104e..e5bfb15 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 /**
  * To be used to cast timestamp to decimal.
@@ -39,11 +39,6 @@ public class CastTimestampToDecimal extends FuncTimestampToDecimal {
 
   @Override
   protected void func(DecimalColumnVector outV, TimestampColumnVector inV, int i) {
-
-    // The BigDecimal class recommends not converting directly from double to BigDecimal,
-    // so we convert like the non-vectorized case and got through a string...
-    Double timestampDouble = inV.getTimestampSecondsWithFractionalNanos(i);
-    HiveDecimal result = HiveDecimal.create(timestampDouble.toString());
-    outV.set(i, result);
+    outV.set(i, TimestampWritable.getHiveDecimal(inV.asScratchTimestamp(i)));
   }
 }
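
The deleted comment described the old route for CAST(timestamp AS decimal): read the value as a double, print it, and re-parse it as a HiveDecimal. The new code asks TimestampWritable.getHiveDecimal() for the value of the scratch Timestamp directly. Assuming the decimal form is the usual Hive representation (epoch seconds with the nanoseconds as the fractional part), an exact BigDecimal-based approximation looks like this:

    import java.math.BigDecimal;
    import java.sql.Timestamp;

    public class TimestampToDecimalSketch {
      // Illustrative: epoch seconds plus fractional nanoseconds as an exact decimal,
      // avoiding the double round-trip the old code worked around.
      static BigDecimal toDecimal(Timestamp ts) {
        long seconds = Math.floorDiv(ts.getTime(), 1000L);          // floor handles pre-1970 values
        BigDecimal fraction = BigDecimal.valueOf(ts.getNanos(), 9); // nanos scaled by 10^-9
        return BigDecimal.valueOf(seconds).add(fraction);
      }

      public static void main(String[] args) {
        Timestamp ts = Timestamp.valueOf("2016-04-04 22:37:05.123456789"); // parsed in the local timezone
        System.out.println(toDecimal(ts));
      }
    }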

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
index f8737f9..a955d79 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDouble.java
@@ -20,8 +20,7 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 
 public class CastTimestampToDouble extends VectorExpression {
   private static final long serialVersionUID = 1L;
@@ -63,7 +62,7 @@ public class CastTimestampToDouble extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputVector[0] =  inputColVector.getTimestampSecondsWithFractionalNanos(0);
+      outputVector[0] = inputColVector.getDouble(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
@@ -71,11 +70,11 @@ public class CastTimestampToDouble extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] =  inputColVector.getDouble(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] =  inputColVector.getDouble(i);
         }
       }
       outputColVector.isRepeating = false;
@@ -83,12 +82,12 @@ public class CastTimestampToDouble extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] = inputColVector.getDouble(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getTimestampSecondsWithFractionalNanos(i);
+          outputVector[i] = inputColVector.getDouble(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
index 4f53f5c..ba2e823 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToLong.java
@@ -64,19 +64,19 @@ public class CastTimestampToLong extends VectorExpression {
     if (inputColVector.isRepeating) {
       //All must be selected otherwise size would be zero
       //Repeating property will not change.
-      outputVector[0] =  inputColVector.getEpochSeconds(0);
+      outputVector[0] =  inputColVector.getTimestampAsLong(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0]; 
+      outputIsNull[0] = inputIsNull[0];
       outputColVector.isRepeating = true;
     } else if (inputColVector.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
       }
       outputColVector.isRepeating = false;
@@ -84,12 +84,12 @@ public class CastTimestampToLong extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] =  inputColVector.getEpochSeconds(i);
+          outputVector[i] =  inputColVector.getTimestampAsLong(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }
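
CastTimestampToLong now reads inputColVector.getTimestampAsLong(); judging by the old getEpochSeconds() name, the long form is whole seconds since the epoch. A standalone sketch of that reduction, assuming (not verified against TimestampColumnVector) that the sub-second part is simply floored away:

    import java.sql.Timestamp;

    public class TimestampToLongSketch {
      // Illustrative: reduce a Timestamp to whole epoch seconds.
      static long toEpochSeconds(Timestamp ts) {
        return Math.floorDiv(ts.getTime(), 1000L); // drop the sub-second part
      }

      public static void main(String[] args) {
        Timestamp ts = new Timestamp(1459809425750L); // 750 ms past an exact second
        System.out.println(toEpochSeconds(ts));       // 1459809425
      }
    }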

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
index 24ee9bc..8a743f6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
@@ -24,13 +24,9 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hive.common.util.DateUtils;
 
 /**
  * Constant is represented as a vector with repeating values.
@@ -44,7 +40,8 @@ public class ConstantVectorExpression extends VectorExpression {
   private double doubleValue = 0;
   private byte[] bytesValue = null;
   private HiveDecimal decimalValue = null;
-  private PisaTimestamp timestampValue = null;
+  private Timestamp timestampValue = null;
+  private HiveIntervalDayTime intervalDayTimeValue = null;
   private boolean isNullValue = false;
 
   private ColumnVector.Type type;
@@ -97,7 +94,7 @@ public class ConstantVectorExpression extends VectorExpression {
   }
 
   public ConstantVectorExpression(int outputColumn, HiveIntervalDayTime value) {
-    this(outputColumn, "timestamp");
+    this(outputColumn, "interval_day_time");
     setIntervalDayTimeValue(value);
   }
 
@@ -165,6 +162,17 @@ public class ConstantVectorExpression extends VectorExpression {
     }
   }
 
+  private void evaluateIntervalDayTime(VectorizedRowBatch vrg) {
+    IntervalDayTimeColumnVector dcv = (IntervalDayTimeColumnVector) vrg.cols[outputColumn];
+    dcv.isRepeating = true;
+    dcv.noNulls = !isNullValue;
+    if (!isNullValue) {
+      dcv.set(0, intervalDayTimeValue);
+    } else {
+      dcv.isNull[0] = true;
+    }
+  }
+
   @Override
   public void evaluate(VectorizedRowBatch vrg) {
     switch (type) {
@@ -183,6 +191,9 @@ public class ConstantVectorExpression extends VectorExpression {
     case TIMESTAMP:
       evaluateTimestamp(vrg);
       break;
+    case INTERVAL_DAY_TIME:
+      evaluateIntervalDayTime(vrg);
+      break;
     }
   }
 
@@ -225,16 +236,19 @@ public class ConstantVectorExpression extends VectorExpression {
   }
 
   public void setTimestampValue(Timestamp timestampValue) {
-    this.timestampValue = new PisaTimestamp(timestampValue);
+    this.timestampValue = timestampValue;
   }
 
-  public void setIntervalDayTimeValue(HiveIntervalDayTime intervalDayTimeValue) {
-    this.timestampValue = intervalDayTimeValue.pisaTimestampUpdate(new PisaTimestamp());
+  public Timestamp getTimestampValue() {
+    return timestampValue;
   }
 
+  public void setIntervalDayTimeValue(HiveIntervalDayTime intervalDayTimeValue) {
+    this.intervalDayTimeValue = intervalDayTimeValue;
+  }
 
-  public PisaTimestamp getTimestampValue() {
-    return timestampValue;
+  public HiveIntervalDayTime getIntervalDayTimeValue() {
+    return intervalDayTimeValue;
   }
 
   public String getTypeString() {

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
index 8d2a186..fafacce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateColumn.java
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
@@ -26,7 +28,7 @@ import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 // A type date (LongColumnVector storing epoch days) minus a type date produces a
-// type interval_day_time (TimestampColumnVector storing nanosecond interval in 2 longs).
+// type interval_day_time (IntervalDayTimeColumnVector storing nanosecond interval in 2 longs).
 public class DateColSubtractDateColumn extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
@@ -34,16 +36,16 @@ public class DateColSubtractDateColumn extends VectorExpression {
   private int colNum1;
   private int colNum2;
   private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp1;
-  private PisaTimestamp scratchPisaTimestamp2;
+  private Timestamp scratchTimestamp1;
+  private Timestamp scratchTimestamp2;
   private DateTimeMath dtm = new DateTimeMath();
 
   public DateColSubtractDateColumn(int colNum1, int colNum2, int outputColumn) {
     this.colNum1 = colNum1;
     this.colNum2 = colNum2;
     this.outputColumn = outputColumn;
-    scratchPisaTimestamp1 = new PisaTimestamp();
-    scratchPisaTimestamp2 = new PisaTimestamp();
+    scratchTimestamp1 = new Timestamp(0);
+    scratchTimestamp2 = new Timestamp(0);
   }
 
   public DateColSubtractDateColumn() {
@@ -63,7 +65,7 @@ public class DateColSubtractDateColumn extends VectorExpression {
     LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
 
     // Output is type interval_day_time.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
     int n = batch.size;
@@ -80,73 +82,69 @@ public class DateColSubtractDateColumn extends VectorExpression {
       || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
       || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
 
-    // Handle nulls first  
+    // Handle nulls first
     NullUtil.propagateNullsColCol(
       inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
 
+    HiveIntervalDayTime resultIntervalDayTime = outputColVector.getScratchIntervalDayTime();
+
     /* Disregard nulls for processing. In other words,
      * the arithmetic operation is performed even if one or
      * more inputs are null. This is to improve speed by avoiding
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      outputColVector.subtract(
-          scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-          scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0])),
-          0);
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
     } else if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
-        scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0]));
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1,
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
-        scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0]));
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1,
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else if (inputColVector2.isRepeating) {
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
       if (batch.selectedInUse) {
-        scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0]));
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
-        scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0]));
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp1.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              scratchPisaTimestamp2.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(scratchTimestamp1, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     }
@@ -157,7 +155,7 @@ public class DateColSubtractDateColumn extends VectorExpression {
      * in complex arithmetic expressions like col2 / (col1 - 1)
      * in the case when some col1 entries are null.
      */
-    NullUtil.setNullDataEntriesTimestamp(outputColVector, batch.selectedInUse, sel, n);
+    NullUtil.setNullDataEntriesIntervalDayTime(outputColVector, batch.selectedInUse, sel, n);
   }
 
   @Override
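
DateColSubtractDateColumn now expands each pair of epoch-day values into the two scratch java.sql.Timestamp objects and lets DateTimeMath.subtract() produce a HiveIntervalDayTime in the output vector's scratch object. A plain-Java sketch of the arithmetic for one element, simplified in that it scales days straight to milliseconds (the real DateWritable.daysToMillis() also adjusts for the local timezone):

    public class DateSubtractSketch {
      // Illustrative: the difference of two dates (epoch days) expressed as
      // total seconds plus nanoseconds, the two fields an interval_day_time carries.
      static long[] subtractDays(long epochDays1, long epochDays2) {
        long diffMillis = (epochDays1 - epochDays2) * 86_400_000L;
        long totalSeconds = diffMillis / 1000L;
        int nanos = (int) ((diffMillis % 1000L) * 1_000_000L);
        return new long[] { totalSeconds, nanos };
      }

      public static void main(String[] args) {
        long[] interval = subtractDays(16900L, 16895L);   // dates five days apart
        System.out.println(interval[0] + " seconds, " + interval[1] + " nanos"); // 432000 seconds, 0 nanos
      }
    }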

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
index 3ea9331..a9ca93c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateColSubtractDateScalar.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -35,16 +36,17 @@ public class DateColSubtractDateScalar extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private PisaTimestamp value;
+  private Timestamp value;
   private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp;
+  private Timestamp scratchTimestamp1;
   private DateTimeMath dtm = new DateTimeMath();
 
   public DateColSubtractDateScalar(int colNum, long value, int outputColumn) {
     this.colNum = colNum;
-    this.value = new PisaTimestamp().updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) value));
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
     this.outputColumn = outputColumn;
-    scratchPisaTimestamp = new PisaTimestamp();
+    scratchTimestamp1 = new Timestamp(0);
   }
 
   public DateColSubtractDateScalar() {
@@ -60,8 +62,8 @@ public class DateColSubtractDateScalar extends VectorExpression {
     // Input #1 is type date (epochDays).
     LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
 
-    // Output is type Timestamp.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    // Output is type HiveIntervalDayTime.
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
     boolean[] inputIsNull = inputColVector1.isNull;
@@ -77,45 +79,40 @@ public class DateColSubtractDateScalar extends VectorExpression {
     }
 
     if (inputColVector1.isRepeating) {
-      outputColVector.subtract(
-          scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-          value,
-          0);
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
     } else if (inputColVector1.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-              value,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-              value,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else /* there are nulls */ {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-              value,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-              value,
-              i);
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.subtract(scratchTimestamp1, value, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
index a8cabb8..59cf9da 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DateScalarSubtractDateColumn.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
@@ -32,16 +33,17 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private PisaTimestamp value;
+  private Timestamp value;
   private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp;
+  private Timestamp scratchTimestamp2;
   private DateTimeMath dtm = new DateTimeMath();
 
   public DateScalarSubtractDateColumn(long value, int colNum, int outputColumn) {
     this.colNum = colNum;
-    this.value = new PisaTimestamp().updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) value));
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
     this.outputColumn = outputColumn;
-    scratchPisaTimestamp = new PisaTimestamp();
+    scratchTimestamp2 = new Timestamp(0);
   }
 
   public DateScalarSubtractDateColumn() {
@@ -62,8 +64,8 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     // Input #2 is type date (epochDays).
     LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
 
-    // Output is type Timestamp.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
+    // Output is type HiveIntervalDayTime.
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
     boolean[] inputIsNull = inputColVector2.isNull;
@@ -80,46 +82,40 @@ public class DateScalarSubtractDateColumn extends VectorExpression {
     }
 
     if (inputColVector2.isRepeating) {
-      outputColVector.subtract(
-          value,
-          scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[0])),
-          0);
-
+      scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[0]));
+      dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+      outputColVector.setFromScratchIntervalDayTime(0);
       // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
     } else if (inputColVector2.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              value,
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              value,
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
       }
     } else {                         /* there are nulls */
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.subtract(
-              value,
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.subtract(
-              value,
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector2[i])),
-              i);
+          scratchTimestamp2.setTime(DateWritable.daysToMillis((int) vector2[i]));
+          dtm.subtract(value, scratchTimestamp2, outputColVector.getScratchIntervalDayTime());
+          outputColVector.setFromScratchIntervalDayTime(i);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
index 42e4984..25a276a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterTimestampColumnInList.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 import java.sql.Timestamp;
 import java.util.HashSet;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
@@ -35,7 +34,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
   private Timestamp[] inListValues;
 
   // The set object containing the IN list.
-  private transient HashSet<PisaTimestamp> inSet;
+  private transient HashSet<Timestamp> inSet;
 
   public FilterTimestampColumnInList() {
     super();
@@ -58,9 +57,9 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
     }
 
     if (inSet == null) {
-      inSet = new HashSet<PisaTimestamp>(inListValues.length);
+      inSet = new HashSet<Timestamp>(inListValues.length);
       for (Timestamp val : inListValues) {
-        inSet.add(new PisaTimestamp(val));
+        inSet.add(val);
       }
     }
 
@@ -74,16 +73,13 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
       return;
     }
 
-    PisaTimestamp scratchTimestamp = new PisaTimestamp();
-
     if (inputColVector.noNulls) {
       if (inputColVector.isRepeating) {
 
         // All must be selected otherwise size would be zero
         // Repeating property will not change.
 
-        inputColVector.pisaTimestampUpdate(scratchTimestamp, 0);
-        if (!(inSet.contains(scratchTimestamp))) {
+        if (!(inSet.contains(inputColVector.asScratchTimestamp(0)))) {
           //Entire batch is filtered out.
           batch.size = 0;
         }
@@ -91,8 +87,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
         int newSize = 0;
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          inputColVector.pisaTimestampUpdate(scratchTimestamp, i);
-          if (inSet.contains(scratchTimestamp)) {
+          if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
             sel[newSize++] = i;
           }
         }
@@ -100,8 +95,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
       } else {
         int newSize = 0;
         for(int i = 0; i != n; i++) {
-          inputColVector.pisaTimestampUpdate(scratchTimestamp, i);
-          if (inSet.contains(scratchTimestamp)) {
+          if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
             sel[newSize++] = i;
           }
         }
@@ -116,8 +110,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
         //All must be selected otherwise size would be zero
         //Repeating property will not change.
         if (!nullPos[0]) {
-          inputColVector.pisaTimestampUpdate(scratchTimestamp, 0);
-          if (!inSet.contains(scratchTimestamp)) {
+          if (!inSet.contains(inputColVector.asScratchTimestamp(0))) {
 
             //Entire batch is filtered out.
             batch.size = 0;
@@ -130,8 +123,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           if (!nullPos[i]) {
-            inputColVector.pisaTimestampUpdate(scratchTimestamp, i);
-           if (inSet.contains(scratchTimestamp)) {
+           if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
              sel[newSize++] = i;
            }
           }
@@ -143,8 +135,7 @@ public class FilterTimestampColumnInList extends VectorExpression implements ITi
         int newSize = 0;
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            inputColVector.pisaTimestampUpdate(scratchTimestamp, i);
-            if (inSet.contains(scratchTimestamp)) {
+            if (inSet.contains(inputColVector.asScratchTimestamp(i))) {
               sel[newSize++] = i;
             }
           }
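
With PisaTimestamp removed, the IN list is kept in a HashSet<Timestamp> and probed with the column's scratch Timestamp. That works because java.sql.Timestamp.equals() compares the full value including nanoseconds (its hashCode() ignores nanos, which only affects bucketing, not correctness). A tiny standalone illustration of the membership test:

    import java.sql.Timestamp;
    import java.util.HashSet;

    public class TimestampInListSketch {
      public static void main(String[] args) {
        HashSet<Timestamp> inSet = new HashSet<>();
        inSet.add(Timestamp.valueOf("2016-04-04 22:37:05.000000001"));

        Timestamp probe = Timestamp.valueOf("2016-04-04 22:37:05.000000001");
        System.out.println(inSet.contains(probe)); // true: equals() matches down to the nanosecond

        probe.setNanos(2);
        System.out.println(inSet.contains(probe)); // false: off by one nanosecond
      }
    }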

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
index a6f8057..804923e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnColumn.java
@@ -17,24 +17,123 @@
  */
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 /**
  * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
  * The first is always a boolean (LongColumnVector).
  * The second and third are long columns or long expression results.
  */
-public class IfExprIntervalDayTimeColumnColumn extends IfExprTimestampColumnColumnBase {
+public class IfExprIntervalDayTimeColumnColumn extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int arg1Column, arg2Column, arg3Column;
+  private int outputColumn;
+
   public IfExprIntervalDayTimeColumnColumn(int arg1Column, int arg2Column, int arg3Column, int outputColumn) {
-    super(arg1Column, arg2Column, arg3Column, outputColumn);
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
   }
 
   public IfExprIntervalDayTimeColumnColumn() {
     super();
   }
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg2ColVector = (IntervalDayTimeColumnVector) batch.cols[arg2Column];
+    IntervalDayTimeColumnVector arg3ColVector = (IntervalDayTimeColumnVector) batch.cols[arg3Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls && arg3ColVector.noNulls;
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    /* All the code paths below propagate nulls even if neither arg2 nor arg3
+     * have nulls. This is to reduce the number of code paths and shorten the
+     * code, at the expense of maybe doing unnecessary work if neither input
+     * has nulls. This could be improved in the future by expanding the number
+     * of code paths.
+     */
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // extend any repeating values and noNulls indicator in the inputs
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
 
   @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
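
The rewritten IfExprIntervalDayTimeColumnColumn evaluates IF(expr1, expr2, expr3) directly over IntervalDayTimeColumnVector instead of inheriting the timestamp base class. The per-row rule, also spelled out in the comment block above, is: a null or false condition selects the third argument, and the output is null exactly when the selected branch is null. A scalar-level sketch of that selection logic, with Long standing in for the interval values and none of the column-vector plumbing:

    public class IfExprSketch {
      // Illustrative: per-row IF(cond, a, b) with SQL-style null handling.
      // A null condition behaves like false; the result's nullness follows the chosen branch.
      static Long ifExpr(Boolean cond, Long a, Long b) {
        boolean takeFirst = cond != null && cond;
        return takeFirst ? a : b;
      }

      public static void main(String[] args) {
        System.out.println(ifExpr(true, 1L, 2L));   // 1
        System.out.println(ifExpr(false, 1L, 2L));  // 2
        System.out.println(ifExpr(null, 1L, 2L));   // 2: null condition falls through to the ELSE value
        System.out.println(ifExpr(true, null, 2L)); // null: the chosen branch is null
      }
    }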

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
index 4beb50a..8face7d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeColumnScalar.java
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 /**
  * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
@@ -28,13 +30,20 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  * The second is a column or non-constant expression result.
  * The third is a constant value.
  */
-public class IfExprIntervalDayTimeColumnScalar extends IfExprTimestampColumnScalarBase {
+public class IfExprIntervalDayTimeColumnScalar extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int arg1Column, arg2Column;
+  private HiveIntervalDayTime arg3Scalar;
+  private int outputColumn;
+
   public IfExprIntervalDayTimeColumnScalar(int arg1Column, int arg2Column, HiveIntervalDayTime arg3Scalar,
       int outputColumn) {
-    super(arg1Column, arg2Column, arg3Scalar.pisaTimestampUpdate(new PisaTimestamp()), outputColumn);
+    this.arg1Column = arg1Column;
+    this.arg2Column = arg2Column;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
   }
 
   public IfExprIntervalDayTimeColumnScalar() {
@@ -42,6 +51,85 @@ public class IfExprIntervalDayTimeColumnScalar extends IfExprTimestampColumnScal
   }
 
   @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg2ColVector = (IntervalDayTimeColumnVector) batch.cols[arg2Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg2ColVector.noNulls; // nulls can only come from arg2
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        arg2ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    arg2ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.asScratchIntervalDayTime(i) : arg3Scalar);
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2ColVector.isNull[i] : false);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg2ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
     return (new VectorExpressionDescriptor.Builder())
         .setMode(

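A minimal usage sketch for the rewritten IfExprIntervalDayTimeColumnScalar (the expression and column-vector classes are taken from the diff above; the driver class, column layout, and values are illustrative assumptions):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeColumnScalar;

    public class IfExprIntervalColumnScalarSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(3);
        LongColumnVector cond = new LongColumnVector();                      // col 0: boolean predicate
        IntervalDayTimeColumnVector thenCol = new IntervalDayTimeColumnVector(); // col 1: THEN column
        IntervalDayTimeColumnVector out = new IntervalDayTimeColumnVector();     // col 2: output
        batch.cols[0] = cond;
        batch.cols[1] = thenCol;
        batch.cols[2] = out;
        batch.size = 2;

        cond.vector[0] = 1;                                      // row 0: true  -> column value
        cond.vector[1] = 0;                                      // row 1: false -> scalar value
        thenCol.set(0, new HiveIntervalDayTime(1, 0, 0, 0, 0));  // 1 day
        thenCol.set(1, new HiveIntervalDayTime(2, 0, 0, 0, 0));  // 2 days

        HiveIntervalDayTime elseScalar = new HiveIntervalDayTime(0, 12, 0, 0, 0); // 12 hours

        IfExprIntervalDayTimeColumnScalar expr =
            new IfExprIntervalDayTimeColumnScalar(0, 1, elseScalar, 2);
        expr.evaluate(batch);
        // Per the evaluate() logic above: out row 0 == 1 day, out row 1 == 12 hours.
      }
    }
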
http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
index 5463c7c..40f2e08 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarColumn.java
@@ -19,8 +19,10 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 /**
  * Compute IF(expr1, expr2, expr3) for 3 input column expressions.
@@ -28,13 +30,20 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  * The second is a constant value.
  * The third is a column or non-constant expression result.
  */
-public class IfExprIntervalDayTimeScalarColumn extends IfExprTimestampScalarColumnBase {
+public class IfExprIntervalDayTimeScalarColumn extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int arg1Column, arg3Column;
+  private HiveIntervalDayTime arg2Scalar;
+  private int outputColumn;
+
   public IfExprIntervalDayTimeScalarColumn(int arg1Column, HiveIntervalDayTime arg2Scalar, int arg3Column,
       int outputColumn) {
-    super(arg1Column, arg2Scalar.pisaTimestampUpdate(new PisaTimestamp()), arg3Column, outputColumn);
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Column = arg3Column;
+    this.outputColumn = outputColumn;
   }
 
   public IfExprIntervalDayTimeScalarColumn() {
@@ -42,6 +51,87 @@ public class IfExprIntervalDayTimeScalarColumn extends IfExprTimestampScalarColu
   }
 
   @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector arg3ColVector = (IntervalDayTimeColumnVector) batch.cols[arg3Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = arg3ColVector.noNulls; // nulls can only come from arg3 column vector
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        arg3ColVector.copySelected(batch.selectedInUse, sel, n, outputColVector);
+      }
+      return;
+    }
+
+    // Extend any repeating values and noNulls indicator in the inputs to
+    // reduce the number of code paths needed below.
+    // This could be optimized in the future by having separate paths
+    // for when arg3ColVector is repeating or has no nulls.
+    arg3ColVector.flatten(batch.selectedInUse, sel, n);
+
+    if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3ColVector.asScratchIntervalDayTime(i));
+          outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              false : arg3ColVector.isNull[i]);
+        }
+      }
+    }
+
+    // restore repeating and no nulls indicators
+    arg3ColVector.unFlatten();
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
     return (new VectorExpressionDescriptor.Builder())
         .setMode(

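A small sketch of the repeating-predicate fast path in IfExprIntervalDayTimeScalarColumn: when arg1 is repeating and true, evaluate() fills the output with the scalar and never reads the arg3 column (illustrative driver; class layout and values are assumptions):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarColumn;

    public class RepeatingPredicateSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(3);
        LongColumnVector cond = new LongColumnVector();
        IntervalDayTimeColumnVector elseCol = new IntervalDayTimeColumnVector();
        IntervalDayTimeColumnVector out = new IntervalDayTimeColumnVector();
        batch.cols[0] = cond;
        batch.cols[1] = elseCol;
        batch.cols[2] = out;
        batch.size = 4;

        // A repeating, uniformly-true predicate: only entry 0 is meaningful.
        cond.isRepeating = true;
        cond.vector[0] = 1;

        IfExprIntervalDayTimeScalarColumn expr = new IfExprIntervalDayTimeScalarColumn(
            0, new HiveIntervalDayTime(3, 0, 0, 0, 0), 1, 2);
        expr.evaluate(batch);
        // Because arg1 is repeating and true, evaluate() takes the fill(arg2Scalar)
        // fast path above and returns without touching the arg3 column.
      }
    }
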
http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
index af2e0c0..43676dd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprIntervalDayTimeScalarScalar.java
@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 /**
  * Compute IF(expr1, expr2, expr3) for 3 input expressions.
@@ -28,13 +32,21 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
  * The second is a constant value.
  * The third is a constant value.
  */
-public class IfExprIntervalDayTimeScalarScalar extends IfExprTimestampScalarScalarBase {
+public class IfExprIntervalDayTimeScalarScalar extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int arg1Column;
+  private HiveIntervalDayTime arg2Scalar;
+  private HiveIntervalDayTime arg3Scalar;
+  private int outputColumn;
+
   public IfExprIntervalDayTimeScalarScalar(int arg1Column, HiveIntervalDayTime arg2Scalar, HiveIntervalDayTime arg3Scalar,
       int outputColumn) {
-    super(arg1Column, arg2Scalar.pisaTimestampUpdate(new PisaTimestamp()), arg3Scalar.pisaTimestampUpdate(new PisaTimestamp()), outputColumn);
+    this.arg1Column = arg1Column;
+    this.arg2Scalar = arg2Scalar;
+    this.arg3Scalar = arg3Scalar;
+    this.outputColumn = outputColumn;
   }
 
   public IfExprIntervalDayTimeScalarScalar() {
@@ -42,6 +54,72 @@ public class IfExprIntervalDayTimeScalarScalar extends IfExprTimestampScalarScal
   }
 
   @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    LongColumnVector arg1ColVector = (LongColumnVector) batch.cols[arg1Column];
+    IntervalDayTimeColumnVector outputColVector = (IntervalDayTimeColumnVector) batch.cols[outputColumn];
+    int[] sel = batch.selected;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = false; // isNull[] is maintained explicitly below; both result branches are non-null scalars
+    outputColVector.isRepeating = false; // may override later
+    int n = batch.size;
+    long[] vector1 = arg1ColVector.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (arg1ColVector.isRepeating) {
+      if (vector1[0] == 1) {
+        outputColVector.fill(arg2Scalar);
+      } else {
+        outputColVector.fill(arg3Scalar);
+      }
+    } else if (arg1ColVector.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3Scalar);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+          outputIsNull[i] = false;
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
+              arg2Scalar : arg3Scalar);
+        }
+        Arrays.fill(outputIsNull, 0, n, false);
+      }
+    }
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "interval_day_time";
+  }
+
+  @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
     return (new VectorExpressionDescriptor.Builder())
         .setMode(

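A sketch of the NULL-predicate semantics in IfExprIntervalDayTimeScalarScalar: a NULL boolean falls through to the ELSE scalar and the output carries no null data entries (driver class and values are illustrative assumptions):

    import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
    import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarScalar;

    public class ScalarScalarNullSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(2);
        LongColumnVector cond = new LongColumnVector();
        IntervalDayTimeColumnVector out = new IntervalDayTimeColumnVector();
        batch.cols[0] = cond;
        batch.cols[1] = out;
        batch.size = 2;

        cond.noNulls = false;
        cond.vector[0] = 1;       // row 0: true
        cond.isNull[1] = true;    // row 1: NULL predicate

        IfExprIntervalDayTimeScalarScalar expr = new IfExprIntervalDayTimeScalarScalar(
            0,
            new HiveIntervalDayTime(1, 0, 0, 0, 0),   // THEN: 1 day
            new HiveIntervalDayTime(2, 0, 0, 0, 0),   // ELSE: 2 days
            1);
        expr.evaluate(batch);
        // Row 0 gets the THEN scalar (1 day); the NULL predicate in row 1 falls
        // through to the ELSE scalar (2 days), and outputIsNull is cleared for both rows.
      }
    }
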
http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
index d3dd67d..8441863 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnColumnBase.java
@@ -89,13 +89,13 @@ public abstract class IfExprTimestampColumnColumnBase extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchPisaTimestamp(i) : arg3ColVector.asScratchPisaTimestamp(i));
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (vector1[i] == 1 ?
               arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchPisaTimestamp(i) : arg3ColVector.asScratchPisaTimestamp(i));
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (vector1[i] == 1 ?
               arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
         }
@@ -105,14 +105,14 @@ public abstract class IfExprTimestampColumnColumnBase extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2ColVector.asScratchPisaTimestamp(i) : arg3ColVector.asScratchPisaTimestamp(i));
+              arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
         }
       } else {
         for(int i = 0; i != n; i++) {
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2ColVector.asScratchPisaTimestamp(i) : arg3ColVector.asScratchPisaTimestamp(i));
+              arg2ColVector.asScratchTimestamp(i) : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               arg2ColVector.isNull[i] : arg3ColVector.isNull[i]);
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
index 0660038..ae997e0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalar.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.sql.Timestamp;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
@@ -35,7 +34,7 @@ public class IfExprTimestampColumnScalar extends IfExprTimestampColumnScalarBase
 
   public IfExprTimestampColumnScalar(int arg1Column, int arg2Column, Timestamp arg3Scalar,
       int outputColumn) {
-    super(arg1Column, arg2Column, new PisaTimestamp(arg3Scalar), outputColumn);
+    super(arg1Column, arg2Column, arg3Scalar, outputColumn);
   }
 
   public IfExprTimestampColumnScalar() {

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
index 8aaad3f..6b87ff2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampColumnScalarBase.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
 
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -38,10 +38,10 @@ public abstract class IfExprTimestampColumnScalarBase extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int arg1Column, arg2Column;
-  private PisaTimestamp arg3Scalar;
+  private Timestamp arg3Scalar;
   private int outputColumn;
 
-  public IfExprTimestampColumnScalarBase(int arg1Column, int arg2Column, PisaTimestamp arg3Scalar,
+  public IfExprTimestampColumnScalarBase(int arg1Column, int arg2Column, Timestamp arg3Scalar,
       int outputColumn) {
     this.arg1Column = arg1Column;
     this.arg2Column = arg2Column;
@@ -91,11 +91,11 @@ public abstract class IfExprTimestampColumnScalarBase extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchPisaTimestamp(i) : arg3Scalar);
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchPisaTimestamp(i) : arg3Scalar);
+          outputColVector.set(i, vector1[i] == 1 ? arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
         }
       }
     } else /* there are nulls */ {
@@ -103,14 +103,14 @@ public abstract class IfExprTimestampColumnScalarBase extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2ColVector.asScratchPisaTimestamp(i) : arg3Scalar);
+              arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               arg2ColVector.isNull[i] : false);
         }
       } else {
         for(int i = 0; i != n; i++) {
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2ColVector.asScratchPisaTimestamp(i) : arg3Scalar);
+              arg2ColVector.asScratchTimestamp(i) : arg3Scalar);
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               arg2ColVector.isNull[i] : false);
         }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
index 7f618cb..3d53df1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumn.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.sql.Timestamp;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 /**
@@ -35,7 +34,7 @@ public class IfExprTimestampScalarColumn extends IfExprTimestampScalarColumnBase
 
   public IfExprTimestampScalarColumn(int arg1Column, Timestamp arg2Scalar, int arg3Column,
       int outputColumn) {
-    super(arg1Column, new PisaTimestamp(arg2Scalar), arg3Column, outputColumn);
+    super(arg1Column, arg2Scalar, arg3Column, outputColumn);
   }
 
   public IfExprTimestampScalarColumn() {

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
index 84d7655..2162f17 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarColumnBase.java
@@ -18,7 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
@@ -36,10 +37,10 @@ public abstract class IfExprTimestampScalarColumnBase extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int arg1Column, arg3Column;
-  private PisaTimestamp arg2Scalar;
+  private Timestamp arg2Scalar;
   private int outputColumn;
 
-  public IfExprTimestampScalarColumnBase(int arg1Column, PisaTimestamp arg2Scalar, int arg3Column,
+  public IfExprTimestampScalarColumnBase(int arg1Column, Timestamp arg2Scalar, int arg3Column,
       int outputColumn) {
     this.arg1Column = arg1Column;
     this.arg2Scalar = arg2Scalar;
@@ -91,11 +92,11 @@ public abstract class IfExprTimestampScalarColumnBase extends VectorExpression {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchPisaTimestamp(i));
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchTimestamp(i));
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchPisaTimestamp(i));
+          outputColVector.set(i, vector1[i] == 1 ? arg2Scalar : arg3ColVector.asScratchTimestamp(i));
         }
       }
     } else /* there are nulls */ {
@@ -103,14 +104,14 @@ public abstract class IfExprTimestampScalarColumnBase extends VectorExpression {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2Scalar : arg3ColVector.asScratchPisaTimestamp(i));
+              arg2Scalar : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               false : arg3ColVector.isNull[i]);
         }
       } else {
         for(int i = 0; i != n; i++) {
           outputColVector.set(i, !arg1ColVector.isNull[i] && vector1[i] == 1 ?
-              arg2Scalar : arg3ColVector.asScratchPisaTimestamp(i));
+              arg2Scalar : arg3ColVector.asScratchTimestamp(i));
           outputIsNull[i] = (!arg1ColVector.isNull[i] && vector1[i] == 1 ?
               false : arg3ColVector.isNull[i]);
         }

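A usage sketch for the Timestamp scalar/column variant after the switch from PisaTimestamp to java.sql.Timestamp (driver class, column layout, and values are illustrative assumptions):

    import java.sql.Timestamp;

    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarColumn;

    public class TimestampScalarColumnSketch {
      public static void main(String[] args) {
        VectorizedRowBatch batch = new VectorizedRowBatch(3);
        LongColumnVector cond = new LongColumnVector();
        TimestampColumnVector elseCol = new TimestampColumnVector();
        TimestampColumnVector out = new TimestampColumnVector();
        batch.cols[0] = cond;
        batch.cols[1] = elseCol;
        batch.cols[2] = out;
        batch.size = 2;

        cond.vector[0] = 0;                                       // row 0: false -> column value
        cond.vector[1] = 1;                                       // row 1: true  -> scalar value
        elseCol.set(0, Timestamp.valueOf("2016-01-01 00:00:00"));
        elseCol.set(1, Timestamp.valueOf("2016-01-02 00:00:00"));

        // The scalar is now passed straight through as java.sql.Timestamp,
        // with no intermediate PisaTimestamp wrapper.
        Timestamp thenScalar = Timestamp.valueOf("2015-12-31 23:59:59");

        IfExprTimestampScalarColumn expr = new IfExprTimestampScalarColumn(0, thenScalar, 1, 2);
        expr.evaluate(batch);
        // Row 0 (false) takes the column value; row 1 (true) takes the scalar.
      }
    }
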
http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
index 5286ea3..cd00d3a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalar.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 
 import java.sql.Timestamp;
@@ -35,7 +34,7 @@ public class IfExprTimestampScalarScalar extends IfExprTimestampScalarScalarBase
 
   public IfExprTimestampScalarScalar(int arg1Column, Timestamp arg2Scalar, Timestamp arg3Scalar,
       int outputColumn) {
-    super(arg1Column, new PisaTimestamp(arg2Scalar), new PisaTimestamp(arg3Scalar), outputColumn);
+    super(arg1Column, arg2Scalar, arg3Scalar, outputColumn);
   }
 
   public IfExprTimestampScalarScalar() {

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
index 1aeabfc..707f574 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IfExprTimestampScalarScalarBase.java
@@ -18,11 +18,12 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+
+import java.sql.Timestamp;
 import java.util.Arrays;
 
 /**
@@ -36,11 +37,11 @@ public abstract class IfExprTimestampScalarScalarBase extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int arg1Column;
-  private PisaTimestamp arg2Scalar;
-  private PisaTimestamp arg3Scalar;
+  private Timestamp arg2Scalar;
+  private Timestamp arg3Scalar;
   private int outputColumn;
 
-  public IfExprTimestampScalarScalarBase(int arg1Column, PisaTimestamp arg2Scalar, PisaTimestamp arg3Scalar,
+  public IfExprTimestampScalarScalarBase(int arg1Column, Timestamp arg2Scalar, Timestamp arg3Scalar,
       int outputColumn) {
     this.arg1Column = arg1Column;
     this.arg2Scalar = arg2Scalar;
@@ -116,8 +117,4 @@ public abstract class IfExprTimestampScalarScalarBase extends VectorExpression {
   public String getOutputType() {
     return "timestamp";
   }
-
-  public int getArg1Column() {
-    return arg1Column;
-  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/52016296/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
index 3c6824d..eb493bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
@@ -22,6 +22,7 @@ import java.util.Arrays;
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.IntervalDayTimeColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
@@ -107,6 +108,31 @@ public class NullUtil {
     }
   }
 
+  /**
+   * Set the data value for all NULL entries to the designated NULL_VALUE.
+   */
+  public static void setNullDataEntriesIntervalDayTime(
+      IntervalDayTimeColumnVector v, boolean selectedInUse, int[] sel, int n) {
+    if (v.noNulls) {
+      return;
+    } else if (v.isRepeating && v.isNull[0]) {
+      v.setNullValue(0);
+    } else if (selectedInUse) {
+      for (int j = 0; j != n; j++) {
+        int i = sel[j];
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    } else {
+      for (int i = 0; i != n; i++) {
+        if(v.isNull[i]) {
+          v.setNullValue(i);
+        }
+      }
+    }
+  }
+
   // for use by Column-Scalar and Scalar-Column arithmetic for null propagation
   public static void setNullOutputEntriesColScalar(
       ColumnVector v, boolean selectedInUse, int[] sel, int n) {