Posted to commits@hive.apache.org by mm...@apache.org on 2016/04/19 12:13:02 UTC

[08/20] hive git commit: HIVE-9862 Vectorized execution corrupts timestamp values (Matt McCline, reviewed by Jason Dere) HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
new file mode 100644
index 0000000..b3e1fae
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFStdSampTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFStdSampTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "stddev_samp",
+    value = "_FUNC_(x) - Returns the sample standard deviation of a set of numbers (vectorized, double)")
+public class VectorUDAFStdSampTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+       * Value is explicitly (re)initialized in reset() (despite the init() below).
+       */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFStdSampTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFStdSampTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+    private void initPartialResultInspector() {
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("variance");
+
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3+
+        model.primitive1(),
+        model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+

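For reference, all three new Timestamp variance classes in this commit share
the same streaming update: on the n-th value x (with sum already including x),
t = n*x - sum and variance += t*t / (n*(n-1)) accumulate the running sum of
squared deviations, with no per-row mean subtraction. A minimal standalone
sketch (illustrative, not part of the patch) that can be checked against a
two-pass computation:

    // Accumulates the same quantity the generated iterate*() methods build up.
    public static double sumOfSquaredDeviations(double[] values) {
      double sum = 0.0;
      double m2 = 0.0;     // the field the generated code names "variance"
      long count = 0;
      for (double x : values) {
        sum += x;
        count += 1;
        if (count > 1) {   // the first value contributes no deviation yet
          double t = count * x - sum;
          m2 += (t * t) / ((double) count * (count - 1));
        }
      }
      return m2;           // var_pop = m2/count, var_samp = m2/(count-1)
    }
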
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
new file mode 100644
index 0000000..970ec22
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarPopTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFVarPopTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "variance, var_pop",
+    value = "_FUNC_(x) - Returns the variance of a set of numbers (vectorized, timestamp)")
+public class VectorUDAFVarPopTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+       * Value is explicitly (re)initialized in reset() (despite the init() below).
+       */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFVarPopTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFVarPopTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+    private void initPartialResultInspector() {
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("variance");
+
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3+
+        model.primitive1(),
+        model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+

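The "TODO: conjure a formula w/o iterating" in the repeating-value paths has a
known closed form: the pairwise-merge rule of Chan et al., with the k repeated
values treated as a batch of mean v and zero internal variance. A hedged
sketch (names are illustrative, and floating-point results can differ slightly
from the loop's):

    // Fold k copies of v into an existing (count, sum, m2) state in O(1).
    static void addRepeated(Aggregation agg, double v, long k) {
      if (agg.count == 0) {
        agg.count = k;
        agg.sum = k * v;
        agg.variance = 0.0;                     // identical values: zero spread
        return;
      }
      double delta = v - agg.sum / agg.count;   // v minus the current mean
      agg.variance += delta * delta
          * ((double) agg.count * k / (agg.count + k));  // cross term only
      agg.sum += k * v;
      agg.count += k;
    }
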
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
new file mode 100644
index 0000000..9af1a28
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/aggregates/VectorUDAFVarSampTimestamp.java
@@ -0,0 +1,527 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
+import org.apache.hadoop.hive.ql.exec.vector.VectorAggregationBufferRow;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.AggregationDesc;
+import org.apache.hadoop.hive.ql.util.JavaDataModel;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+
+/**
+* VectorUDAFVarSampTimestamp. Vectorized implementation for VARIANCE aggregates.
+*/
+@Description(name = "var_samp",
+    value = "_FUNC_(x) - Returns the sample variance of a set of numbers (vectorized, double)")
+public class VectorUDAFVarSampTimestamp extends VectorAggregateExpression {
+
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Class for storing the current aggregate value.
+     */
+    private static final class Aggregation implements AggregationBuffer {
+
+      private static final long serialVersionUID = 1L;
+
+      transient private double sum;
+      transient private long count;
+      transient private double variance;
+
+      /**
+       * Value is explicitly (re)initialized in reset() (despite the init() below).
+       */
+      transient private boolean isNull = true;
+
+      public void init() {
+        isNull = false;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+
+      @Override
+      public int getVariableSize() {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public void reset () {
+        isNull = true;
+        sum = 0;
+        count = 0;
+        variance = 0;
+      }
+    }
+
+    private VectorExpression inputExpression;
+    transient private LongWritable resultCount;
+    transient private DoubleWritable resultSum;
+    transient private DoubleWritable resultVariance;
+    transient private Object[] partialResult;
+
+    transient private ObjectInspector soi;
+
+
+    public VectorUDAFVarSampTimestamp(VectorExpression inputExpression) {
+      this();
+      this.inputExpression = inputExpression;
+    }
+
+    public VectorUDAFVarSampTimestamp() {
+      super();
+      partialResult = new Object[3];
+      resultCount = new LongWritable();
+      resultSum = new DoubleWritable();
+      resultVariance = new DoubleWritable();
+      partialResult[0] = resultCount;
+      partialResult[1] = resultSum;
+      partialResult[2] = resultVariance;
+      initPartialResultInspector();
+    }
+
+    private void initPartialResultInspector() {
+      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
+      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+      foi.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
+
+      List<String> fname = new ArrayList<String>();
+      fname.add("count");
+      fname.add("sum");
+      fname.add("variance");
+
+      soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
+    }
+
+    private Aggregation getCurrentAggregationBuffer(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        int row) {
+      VectorAggregationBufferRow mySet = aggregationBufferSets[row];
+      Aggregation myagg = (Aggregation) mySet.getAggregationBuffer(aggregateIndex);
+      return myagg;
+    }
+
+
+    @Override
+    public void aggregateInputSelection(
+      VectorAggregationBufferRow[] aggregationBufferSets,
+      int aggregateIndex,
+      VectorizedRowBatch batch) throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls || !inputColVector.isNull[0]) {
+          iterateRepeatingNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNullsWithAggregationSelection(
+            aggregationBufferSets, aggregateIndex, inputColVector, batchSize,
+            inputColVector.isNull, batch.selected);
+      }
+
+    }
+
+    private void iterateRepeatingNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        double value,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          j);
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      for (int i=0; i< batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        double value = inputColVector.getDouble(selected[i]);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    private void iterateNoSelectionHasNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          Aggregation myagg = getCurrentAggregationBuffer(
+            aggregationBufferSets,
+            aggregateIndex,
+            i);
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNullsWithAggregationSelection(
+        VectorAggregationBufferRow[] aggregationBufferSets,
+        int aggregateIndex,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      for (int i=0; i<batchSize; ++i) {
+        Aggregation myagg = getCurrentAggregationBuffer(
+          aggregationBufferSets,
+          aggregateIndex,
+          i);
+        if (myagg.isNull) {
+          myagg.init ();
+        }
+        double value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        if(myagg.count > 1) {
+          double t = myagg.count*value - myagg.sum;
+          myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+        }
+      }
+    }
+
+    @Override
+    public void aggregateInput(AggregationBuffer agg, VectorizedRowBatch batch)
+    throws HiveException {
+
+      inputExpression.evaluate(batch);
+
+      TimestampColumnVector inputColVector = (TimestampColumnVector)batch.
+        cols[this.inputExpression.getOutputColumn()];
+
+      int batchSize = batch.size;
+
+      if (batchSize == 0) {
+        return;
+      }
+
+      Aggregation myagg = (Aggregation)agg;
+
+      if (inputColVector.isRepeating) {
+        if (inputColVector.noNulls) {
+          iterateRepeatingNoNulls(myagg, inputColVector.getDouble(0), batchSize);
+        }
+      }
+      else if (!batch.selectedInUse && inputColVector.noNulls) {
+        iterateNoSelectionNoNulls(myagg, inputColVector, batchSize);
+      }
+      else if (!batch.selectedInUse) {
+        iterateNoSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull);
+      }
+      else if (inputColVector.noNulls){
+        iterateSelectionNoNulls(myagg, inputColVector, batchSize, batch.selected);
+      }
+      else {
+        iterateSelectionHasNulls(myagg, inputColVector, batchSize, inputColVector.isNull, batch.selected);
+      }
+    }
+
+    private void iterateRepeatingNoNulls(
+        Aggregation myagg,
+        double value,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      // TODO: conjure a formula w/o iterating
+      //
+
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // We pulled out i=0 so we can remove the count > 1 check in the loop
+      for (int i=1; i<batchSize; ++i) {
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull,
+        int[] selected) {
+
+      for (int j=0; j< batchSize; ++j) {
+        int i = selected[j];
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        int[] selected) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(selected[0]);
+      myagg.sum += value;
+      myagg.count += 1;
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove the count > 1 check in the loop
+      //
+      for (int i=1; i< batchSize; ++i) {
+        value = inputColVector.getDouble(selected[i]);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    private void iterateNoSelectionHasNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize,
+        boolean[] isNull) {
+
+      for(int i=0;i<batchSize;++i) {
+        if (!isNull[i]) {
+          double value = inputColVector.getDouble(i);
+          if (myagg.isNull) {
+            myagg.init ();
+          }
+          myagg.sum += value;
+          myagg.count += 1;
+          if(myagg.count > 1) {
+            double t = myagg.count*value - myagg.sum;
+            myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+          }
+        }
+      }
+    }
+
+    private void iterateNoSelectionNoNulls(
+        Aggregation myagg,
+        TimestampColumnVector inputColVector,
+        int batchSize) {
+
+      if (myagg.isNull) {
+        myagg.init ();
+      }
+
+      double value = inputColVector.getDouble(0);
+      myagg.sum += value;
+      myagg.count += 1;
+
+      if(myagg.count > 1) {
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+
+      // i=0 was pulled out to remove count > 1 check
+      for (int i=1; i<batchSize; ++i) {
+        value = inputColVector.getDouble(i);
+        myagg.sum += value;
+        myagg.count += 1;
+        double t = myagg.count*value - myagg.sum;
+        myagg.variance += (t*t) / ((double)myagg.count*(myagg.count-1));
+      }
+    }
+
+    @Override
+    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
+      return new Aggregation();
+    }
+
+    @Override
+    public void reset(AggregationBuffer agg) throws HiveException {
+      Aggregation myAgg = (Aggregation) agg;
+      myAgg.reset();
+    }
+
+    @Override
+    public Object evaluateOutput(
+        AggregationBuffer agg) throws HiveException {
+      Aggregation myagg = (Aggregation) agg;
+      if (myagg.isNull) {
+        return null;
+      }
+      else {
+        assert(0 < myagg.count);
+        resultCount.set (myagg.count);
+        resultSum.set (myagg.sum);
+        resultVariance.set (myagg.variance);
+        return partialResult;
+      }
+    }
+    @Override
+    public ObjectInspector getOutputObjectInspector() {
+      return soi;
+    }
+
+  @Override
+  public int getAggregationBufferFixedSize() {
+      JavaDataModel model = JavaDataModel.get();
+      return JavaDataModel.alignUp(
+        model.object() +
+        model.primitive2()*3+
+        model.primitive1(),
+        model.memoryAlign());
+  }
+
+  @Override
+  public void init(AggregationDesc desc) throws HiveException {
+    // No-op
+  }
+
+  public VectorExpression getInputExpression() {
+    return inputExpression;
+  }
+
+  public void setInputExpression(VectorExpression inputExpression) {
+    this.inputExpression = inputExpression;
+  }
+}
+

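For orientation: all three classes emit the same partial result struct
{count, sum, variance}, where "variance" is the running sum of squared
deviations (m2). The aggregate-specific arithmetic presumably happens only
when the partials are finalized, in Hive's row-mode variance evaluators;
roughly, and with edge-case handling only approximated here:

    // Hedged sketch of finalization from a merged partial (count, m2).
    static double finish(String func, long count, double m2) {
      switch (func) {
        case "var_pop":     return m2 / count;
        case "var_samp":    return count > 1 ? m2 / (count - 1) : 0.0;
        case "stddev_samp": return count > 1 ? Math.sqrt(m2 / (count - 1)) : 0.0;
        default: throw new IllegalArgumentException(func);
      }
    }
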
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
index d3a0f9f..20cfb89 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/udf/VectorUDFAdaptor.java
@@ -297,23 +297,14 @@ public class VectorUDFAdaptor extends VectorExpression {
         lv.vector[i] = ((WritableByteObjectInspector) outputOI).get(value);
       }
     } else if (outputOI instanceof WritableTimestampObjectInspector) {
-      LongColumnVector lv = (LongColumnVector) colVec;
+      TimestampColumnVector tv = (TimestampColumnVector) colVec;
       Timestamp ts;
       if (value instanceof Timestamp) {
         ts = (Timestamp) value;
       } else {
         ts = ((WritableTimestampObjectInspector) outputOI).getPrimitiveJavaObject(value);
       }
-      /* Calculate the number of nanoseconds since the epoch as a long integer. By convention
-       * that is how Timestamp values are operated on in a vector.
-       */
-      long l = ts.getTime() * 1000000  // Shift the milliseconds value over by 6 digits
-                                       // to scale for nanosecond precision.
-                                       // The milliseconds digits will by convention be all 0s.
-            + ts.getNanos() % 1000000; // Add on the remaining nanos.
-                                       // The % 1000000 operation removes the ms values
-                                       // so that the milliseconds are not counted twice.
-      lv.vector[i] = l;
+      tv.set(i, ts);
     } else if (outputOI instanceof WritableDateObjectInspector) {
       LongColumnVector lv = (LongColumnVector) colVec;
       Date ts;

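The code removed above is the pre-HIVE-9862 convention: a Timestamp packed
into a single long of nanoseconds since the epoch. A sketch of that encoding,
reconstructed from the deleted lines, for comparison with the new
tv.set(i, ts):

    static long toNanosLong(java.sql.Timestamp ts) {
      return ts.getTime() * 1000000     // milliseconds scaled to nanoseconds
           + ts.getNanos() % 1000000;   // sub-ms nanos; % avoids counting ms twice
    }

A single long of nanoseconds covers only about +/-292 years around the epoch
(and the multiply overflows silently beyond that), one reason the patch moves
to TimestampColumnVector, which keeps milliseconds and nanoseconds in
separate arrays.
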
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
index 96df394..146b3f0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TreeReaderFactory.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
@@ -997,6 +998,7 @@ public class TreeReaderFactory {
     private final TimeZone readerTimeZone;
     private TimeZone writerTimeZone;
     private boolean hasSameTZRules;
+    private TimestampWritable scratchTimestampWritable;
 
     TimestampTreeReader(int columnId, boolean skipCorrupt) throws IOException {
       this(columnId, null, null, null, null, skipCorrupt);
@@ -1130,24 +1132,26 @@ public class TreeReaderFactory {
 
     @Override
     public Object nextVector(Object previousVector, long batchSize) throws IOException {
-      final LongColumnVector result;
+      final TimestampColumnVector result;
       if (previousVector == null) {
-        result = new LongColumnVector();
+        result = new TimestampColumnVector();
       } else {
-        result = (LongColumnVector) previousVector;
+        result = (TimestampColumnVector) previousVector;
       }
 
       result.reset();
-      Object obj = null;
+      if (scratchTimestampWritable == null) {
+        scratchTimestampWritable = new TimestampWritable();
+      }
+      Object obj;
       for (int i = 0; i < batchSize; i++) {
-        obj = next(obj);
+        obj = next(scratchTimestampWritable);
         if (obj == null) {
           result.noNulls = false;
           result.isNull[i] = true;
         } else {
           TimestampWritable writable = (TimestampWritable) obj;
-          Timestamp timestamp = writable.getTimestamp();
-          result.vector[i] = TimestampUtils.getTimeNanoSec(timestamp);
+          result.set(i, writable.getTimestamp());
         }
       }
 

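Beyond swapping the vector type, the rewritten loop also fixes an allocation pattern: instead of threading the previous row's object through next(), it allocates one scratch TimestampWritable per batch and lets the reader refill it in place, copying the value into the column vector row by row. A stripped-down sketch of that reuse pattern, assuming a next() that fills the passed writable or returns null for a null row (the RowSource interface here is purely illustrative):

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    class ScratchReuseSketch {
      interface RowSource {
        // Fills 'previous' and returns it, or returns null for a null row.
        Object next(Object previous);
      }

      static void readBatch(RowSource src, TimestampColumnVector result, int batchSize) {
        TimestampWritable scratch = new TimestampWritable(); // one allocation per batch, not per row
        for (int i = 0; i < batchSize; i++) {
          Object obj = src.next(scratch);
          if (obj == null) {
            result.noNulls = false;
            result.isNull[i] = true;
          } else {
            result.set(i, ((TimestampWritable) obj).getTimestamp());
          }
        }
      }
    }
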
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
index 3c0d590..2d73431 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/TypeDescription.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 
 import java.util.ArrayList;
@@ -278,9 +279,10 @@ public class TypeDescription {
       case SHORT:
       case INT:
       case LONG:
-      case TIMESTAMP:
       case DATE:
         return new LongColumnVector();
+      case TIMESTAMP:
+        return new TimestampColumnVector();
       case FLOAT:
       case DOUBLE:
         return new DoubleColumnVector();

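With TIMESTAMP split out of the LongColumnVector case, a batch built from an ORC schema now carries a TimestampColumnVector for timestamp columns. A quick check, assuming the builder methods TypeDescription already exposes (createStruct/addField/createTimestamp/createRowBatch):

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
    import org.apache.hadoop.hive.ql.io.orc.TypeDescription;

    public class CreateColumnDemo {
      public static void main(String[] args) {
        TypeDescription schema = TypeDescription.createStruct()
            .addField("ts", TypeDescription.createTimestamp());
        VectorizedRowBatch batch = schema.createRowBatch();
        // Before this patch the timestamp column was a LongColumnVector:
        assert batch.cols[0] instanceof TimestampColumnVector;
      }
    }
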
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index 21e6ff7..3ff6acf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "4. A day-time interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  30")
-@VectorizedExpressions({VectorUDFDayOfMonthLong.class, VectorUDFDayOfMonthString.class})
+@VectorizedExpressions({VectorUDFDayOfMonthDate.class, VectorUDFDayOfMonthString.class, VectorUDFDayOfMonthTimestamp.class})
 public class UDFDayOfMonth extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

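The same two-for-one substitution repeats in the UDFHour, UDFMinute, UDFMonth, UDFSecond, UDFWeekOfYear, UDFYear, and GenericUDFDate diffs below: the old VectorUDFXxxLong class, which served both date and timestamp columns under the epoch-nanos convention, is split into a Date variant (dates still ride in a LongColumnVector as days since epoch) and a Timestamp variant for the new TimestampColumnVector, and the vectorizer selects whichever listed class matches the input column type. What a Timestamp variant does per row, sketched against the TimestampColumnVector API (this is not the shipped class, just the shape of it):

    import java.util.Calendar;
    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    class DayOfMonthSketch {
      // Extract day-of-month for the first n rows, assuming no nulls and
      // that asScratchTimestamp(i) materializes row i without allocating.
      static void evaluate(TimestampColumnVector in, long[] out, int n) {
        Calendar calendar = Calendar.getInstance();
        for (int i = 0; i < n; i++) {
          calendar.setTime(in.asScratchTimestamp(i));
          out[i] = calendar.get(Calendar.DAY_OF_MONTH);
        }
      }
    }
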
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
index 835cecc..f6f9c93 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFHour.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFHourTimestamp;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  12\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  12")
-@VectorizedExpressions({VectorUDFHourLong.class, VectorUDFHourString.class})
+@VectorizedExpressions({VectorUDFHourDate.class, VectorUDFHourString.class, VectorUDFHourTimestamp.class})
 public class UDFHour extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
index a9f5393..606b6d6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMinute.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMinuteTimestamp;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  58\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  58")
-@VectorizedExpressions({VectorUDFMinuteLong.class, VectorUDFMinuteString.class})
+@VectorizedExpressions({VectorUDFMinuteDate.class, VectorUDFMinuteString.class, VectorUDFMinuteTimestamp.class})
 public class UDFMinute extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
   private final SimpleDateFormat formatter2 = new SimpleDateFormat("HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 3365804..1975535 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,12 +48,12 @@ import org.apache.hadoop.io.Text;
     + "4. A year-month interval value"
     + "Example:\n"
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  7")
-@VectorizedExpressions({VectorUDFMonthLong.class, VectorUDFMonthString.class})
+@VectorizedExpressions({VectorUDFMonthDate.class, VectorUDFMonthString.class, VectorUDFMonthTimestamp.class})
 public class UDFMonth extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();
 
-  private IntWritable result = new IntWritable();
+  private final IntWritable result = new IntWritable();
 
   public UDFMonth() {
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
index e7c3d67..11764cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFSecond.java
@@ -27,8 +27,9 @@ import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFSecondTimestamp;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -50,7 +51,7 @@ import org.apache.hive.common.util.DateUtils;
     + "  > SELECT _FUNC_('2009-07-30 12:58:59') FROM src LIMIT 1;\n"
     + "  59\n"
     + "  > SELECT _FUNC_('12:58:59') FROM src LIMIT 1;\n" + "  59")
-@VectorizedExpressions({VectorUDFSecondLong.class, VectorUDFSecondString.class})
+@VectorizedExpressions({VectorUDFSecondDate.class, VectorUDFSecondString.class, VectorUDFSecondTimestamp.class})
 public class UDFSecond extends UDF {
   private final SimpleDateFormat formatter1 = new SimpleDateFormat(
       "yyyy-MM-dd HH:mm:ss");

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
index 9786636..17b892c 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDateToBooleanViaLongToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToBooleanViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToBoolean;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -45,7 +45,7 @@ import org.apache.hadoop.io.Text;
  *
  */
 @VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
-  CastDateToBooleanViaLongToLong.class, CastTimestampToBooleanViaLongToLong.class,
+  CastDateToBooleanViaLongToLong.class, CastTimestampToBoolean.class,
   CastDoubleToBooleanViaDoubleToLong.class, CastDecimalToBoolean.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();

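This diff and the six that follow (UDFToByte, UDFToDouble, UDFToFloat, UDFToInteger, UDFToLong, UDFToShort) make the same move for casts: the generated ...ViaLongToLong / ...ViaLongToDouble classes, which reinterpreted the old epoch-nanos long, give way to hand-written classes that read the TimestampColumnVector fields directly. For the boolean cast the per-row logic is tiny; a sketch, assuming Hive's convention that only the exact epoch value casts to false:

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    class CastTimestampToBooleanSketch {
      // 1 for any non-epoch timestamp, 0 for 1970-01-01 00:00:00.000000000 UTC.
      static long toBooleanLong(TimestampColumnVector tcv, int i) {
        return (tcv.getTime(i) == 0 && tcv.getNanos(i) == 0) ? 0 : 1;
      }
    }
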
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
index d274531..159dd0f 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,7 +40,7 @@ import org.apache.hadoop.io.Text;
  * UDFToByte.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToByte extends UDF {
   private final ByteWritable byteWritable = new ByteWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
index 8084537..5763947 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToDouble;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -38,7 +38,7 @@ import org.apache.hadoop.io.Text;
  * UDFToDouble.
  *
  */
-@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class,
+@VectorizedExpressions({CastTimestampToDouble.class, CastLongToDouble.class,
     CastDecimalToDouble.class})
 public class UDFToDouble extends UDF {
   private final DoubleWritable doubleWritable = new DoubleWritable();

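For the floating-point casts the new CastTimestampToDouble has real semantics to preserve: seconds since the epoch with the fractional part carried down to nanoseconds, rather than a scaled reinterpretation of the raw nanos long. A sketch of that computation from the vector's two fields (the shipped code may well delegate to a helper on TimestampColumnVector itself):

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    class CastTimestampToDoubleSketch {
      // Seconds since epoch as a double, e.g. 1.234567891 for
      // 1970-01-01 00:00:01.234567891 UTC.
      static double toDouble(TimestampColumnVector tcv, int i) {
        long millis = tcv.getTime(i);  // epoch milliseconds
        int nanos = tcv.getNanos(i);   // sub-second nanos, includes the millis digits
        return millis / 1000.0 + (nanos % 1000000) / 1e9;
      }
    }
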
http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
index 129da43..e2183f4 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToDoubleViaLongToDouble;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToDouble;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.Text;
  * UDFToFloat.
  *
  */
-@VectorizedExpressions({CastTimestampToDoubleViaLongToDouble.class, CastLongToDouble.class,
+@VectorizedExpressions({CastTimestampToDouble.class, CastLongToDouble.class,
     CastDecimalToDouble.class})
 public class UDFToFloat extends UDF {
   private final FloatWritable floatWritable = new FloatWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
index b669754..5f5d1fe 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.ql.io.RecordIdentifier;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.Text;
  * UDFToInteger.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToInteger extends UDF {
   private final IntWritable intWritable = new IntWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
index 04ff7cf..3eeabea 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -40,7 +40,7 @@ import org.apache.hadoop.io.Text;
  * UDFToLong.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToLong extends UDF {
   private final LongWritable longWritable = new LongWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
index 5315552..b9065b2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToLongViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CastTimestampToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -41,7 +41,7 @@ import org.apache.hadoop.io.Text;
  * UDFToShort.
  *
  */
-@VectorizedExpressions({CastTimestampToLongViaLongToLong.class, CastDoubleToLong.class,
+@VectorizedExpressions({CastTimestampToLong.class, CastDoubleToLong.class,
     CastDecimalToLong.class})
 public class UDFToShort extends UDF {
   ShortWritable shortWritable = new ShortWritable();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index f076d1d..d65b3ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.io.IntWritable;
@@ -44,7 +45,7 @@ import org.apache.hadoop.io.Text;
     + "  > SELECT _FUNC_('2008-02-20') FROM src LIMIT 1;\n"
     + "  8\n"
     + "  > SELECT _FUNC_('1980-12-31 12:59:59') FROM src LIMIT 1;\n" + "  1")
-@VectorizedExpressions({VectorUDFWeekOfYearLong.class, VectorUDFWeekOfYearString.class})
+@VectorizedExpressions({VectorUDFWeekOfYearDate.class, VectorUDFWeekOfYearString.class, VectorUDFWeekOfYearTimestamp.class})
 public class UDFWeekOfYear extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index 34b0c47..92ee700 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -26,8 +26,9 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearDate;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -47,7 +48,7 @@ import org.apache.hadoop.io.Text;
     + "4. A year-month interval value"
     + "Example:\n "
     + "  > SELECT _FUNC_('2009-07-30') FROM src LIMIT 1;\n" + "  2009")
-@VectorizedExpressions({VectorUDFYearLong.class, VectorUDFYearString.class})
+@VectorizedExpressions({VectorUDFYearDate.class, VectorUDFYearString.class, VectorUDFYearTimestamp.class})
 public class UDFYear extends UDF {
   private final SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private final Calendar calendar = Calendar.getInstance();

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
index 10e648e..8c376a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDate.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateString;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -50,7 +51,7 @@ import org.apache.hadoop.io.Text;
     extended = "Example:\n "
         + "  > SELECT _FUNC_('2009-07-30 04:17:52') FROM src LIMIT 1;\n"
         + "  '2009-07-30'")
-@VectorizedExpressions({VectorUDFDateString.class, VectorUDFDateLong.class})
+@VectorizedExpressions({VectorUDFDateString.class, VectorUDFDateLong.class, VectorUDFDateTimestamp.class})
 public class GenericUDFDate extends GenericUDF {
   private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
   private transient TimestampConverter timestampConverter;

http://git-wip-us.apache.org/repos/asf/hive/blob/130293e5/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
index 568fd46..b5b7153 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
@@ -28,8 +28,8 @@ import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongColumnLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnLongScalar;
@@ -42,12 +42,20 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarLon
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarDoubleScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeColumnColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeColumnScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprIntervalDayTimeScalarScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampColumnScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprTimestampScalarScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarCharScalar;
@@ -76,6 +84,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStri
   IfExprLongScalarDoubleColumn.class, IfExprDoubleScalarLongColumn.class,
   IfExprLongScalarLongScalar.class, IfExprDoubleScalarDoubleScalar.class,
   IfExprLongScalarDoubleScalar.class, IfExprDoubleScalarLongScalar.class,
+
   IfExprStringGroupColumnStringGroupColumn.class,
   IfExprStringGroupColumnStringScalar.class,
   IfExprStringGroupColumnCharScalar.class, IfExprStringGroupColumnVarCharScalar.class,
@@ -83,7 +92,12 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStri
   IfExprCharScalarStringGroupColumn.class, IfExprVarCharScalarStringGroupColumn.class,
   IfExprStringScalarStringScalar.class,
   IfExprStringScalarCharScalar.class, IfExprStringScalarVarCharScalar.class,
-  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class
+  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class,
+
+  IfExprIntervalDayTimeColumnColumn.class, IfExprIntervalDayTimeColumnScalar.class,
+  IfExprIntervalDayTimeScalarColumn.class, IfExprIntervalDayTimeScalarScalar.class,
+  IfExprTimestampColumnColumn.class, IfExprTimestampColumnScalar.class,
+  IfExprTimestampScalarColumn.class, IfExprTimestampScalarScalar.class,
 })
 public class GenericUDFIf extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
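
Finally, the IF expression gains column/scalar variants for the two new vector-backed types, grouped after the existing string entries. The per-row selection logic for the column/column timestamp case reduces to copying from one input vector or the other based on the boolean condition vector; a sketch against ColumnVector.setElement, ignoring the null handling and isRepeating fast paths the real classes carry:

    import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

    class IfExprTimestampSketch {
      // out[i] = cond[i] == 1 ? thenCol[i] : elseCol[i], for the first n rows.
      static void evaluate(long[] cond, TimestampColumnVector thenCol,
          TimestampColumnVector elseCol, TimestampColumnVector out, int n) {
        for (int i = 0; i < n; i++) {
          out.setElement(i, i, cond[i] == 1 ? thenCol : elseCol);
        }
      }
    }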