You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by gu...@apache.org on 2014/09/05 21:15:48 UTC

svn commit: r1622763 [15/17] - in /hive/branches/tez: ./ ant/src/org/apache/hadoop/hive/ant/ hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/ itests/hive-unit/ itests/hive-u...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Fri Sep  5 19:15:44 2014
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue;
@@ -908,10 +909,10 @@ class RecordReaderImpl implements Record
   }
 
   private static class BinaryTreeReader extends TreeReader{
-    private InStream stream;
-    private IntegerReader lengths = null;
+    protected InStream stream;
+    protected IntegerReader lengths = null;
 
-    private final LongColumnVector scratchlcv;
+    protected final LongColumnVector scratchlcv;
 
     BinaryTreeReader(Path path, int columnId, Configuration conf) {
       super(path, columnId, conf);
@@ -983,7 +984,7 @@ class RecordReaderImpl implements Record
       // Read present/isNull stream
       super.nextVector(result, batchSize);
 
-      BytesColumnVectorUtil.setRefToOrcByteArrays(stream, lengths, scratchlcv, result, batchSize);
+      BytesColumnVectorUtil.readOrcByteArrays(stream, lengths, scratchlcv, result, batchSize);
       return result;
     }
 
@@ -1376,12 +1377,13 @@ class RecordReaderImpl implements Record
     }
   }
 
+  // This class collects together very similar methods for reading an ORC vector of byte arrays and
+  // creating the BytesColumnVector.
+  //
   private static class BytesColumnVectorUtil {
-    // This method has the common code for reading in bytes into a BytesColumnVector.
-    // It is used by the BINARY, STRING, CHAR, VARCHAR types.
-    public static void setRefToOrcByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv,
-            BytesColumnVector result, long batchSize) throws IOException {
 
+    private static byte[] commonReadByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv,
+            BytesColumnVector result, long batchSize) throws IOException {
       // Read lengths
       scratchlcv.isNull = result.isNull;  // Notice we are replacing the isNull vector here...
       lengths.nextVector(scratchlcv, batchSize);
@@ -1409,11 +1411,20 @@ class RecordReaderImpl implements Record
         }
         len -= bytesRead;
         offset += bytesRead;
-      }
+      } 
+
+      return allBytes;
+    }
+
+    // This method has the common code for reading in bytes into a BytesColumnVector.
+    public static void readOrcByteArrays(InStream stream, IntegerReader lengths, LongColumnVector scratchlcv,
+            BytesColumnVector result, long batchSize) throws IOException {
+
+      byte[] allBytes = commonReadByteArrays(stream, lengths, scratchlcv, result, batchSize);
 
       // Too expensive to figure out 'repeating' by comparisons.
       result.isRepeating = false;
-      offset = 0;
+      int offset = 0;
       if (!scratchlcv.isRepeating) {
         for (int i = 0; i < batchSize; i++) {
           if (!scratchlcv.isNull[i]) {
@@ -1518,7 +1529,7 @@ class RecordReaderImpl implements Record
       // Read present/isNull stream
       super.nextVector(result, batchSize);
 
-      BytesColumnVectorUtil.setRefToOrcByteArrays(stream, lengths, scratchlcv, result, batchSize);
+      BytesColumnVectorUtil.readOrcByteArrays(stream, lengths, scratchlcv, result, batchSize);
       return result;
     }
 
@@ -1734,6 +1745,42 @@ class RecordReaderImpl implements Record
       result.enforceMaxLength(maxLength);
       return result;
     }
+
+    @Override
+    Object nextVector(Object previousVector, long batchSize) throws IOException {
+      // Get the vector of strings from StringTreeReader, then make a 2nd pass to
+      // adjust down the length (right trim and truncate) if necessary.
+      BytesColumnVector result = (BytesColumnVector) super.nextVector(previousVector, batchSize);
+
+      int adjustedDownLen;
+      if (result.isRepeating) {
+        if (result.noNulls || !result.isNull[0]) {
+          adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[0], result.start[0], result.length[0], maxLength);
+          if (adjustedDownLen < result.length[0]) {
+            result.setRef(0, result.vector[0], result.start[0], adjustedDownLen);
+          }
+        }
+      } else {
+        if (result.noNulls){ 
+          for (int i = 0; i < batchSize; i++) {
+            adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[i], result.start[i], result.length[i], maxLength);
+            if (adjustedDownLen < result.length[i]) {
+              result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+            }
+          }
+        } else {
+          for (int i = 0; i < batchSize; i++) {
+            if (!result.isNull[i]) {
+              adjustedDownLen = StringExpr.rightTrimAndTruncate(result.vector[i], result.start[i], result.length[i], maxLength);
+              if (adjustedDownLen < result.length[i]) {
+                result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+              }
+            }
+          }
+        }
+      }
+      return result;
+    }
   }
 
   private static class VarcharTreeReader extends StringTreeReader {
@@ -1762,6 +1809,42 @@ class RecordReaderImpl implements Record
       result.enforceMaxLength(maxLength);
       return result;
     }
+
+    @Override
+    Object nextVector(Object previousVector, long batchSize) throws IOException {
+      // Get the vector of strings from StringTreeReader, then make a 2nd pass to
+      // adjust down the length (truncate) if necessary.
+      BytesColumnVector result = (BytesColumnVector) super.nextVector(previousVector, batchSize);
+
+      int adjustedDownLen;
+      if (result.isRepeating) {
+        if (result.noNulls || !result.isNull[0]) {
+          adjustedDownLen = StringExpr.truncate(result.vector[0], result.start[0], result.length[0], maxLength);
+          if (adjustedDownLen < result.length[0]) {
+            result.setRef(0, result.vector[0], result.start[0], adjustedDownLen);
+          }
+        }
+      } else {
+        if (result.noNulls){ 
+          for (int i = 0; i < batchSize; i++) {
+            adjustedDownLen = StringExpr.truncate(result.vector[i], result.start[i], result.length[i], maxLength);
+            if (adjustedDownLen < result.length[i]) {
+              result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+            }
+          }
+        } else {
+          for (int i = 0; i < batchSize; i++) {
+            if (!result.isNull[i]) {
+              adjustedDownLen = StringExpr.truncate(result.vector[i], result.start[i], result.length[i], maxLength);
+              if (adjustedDownLen < result.length[i]) {
+                result.setRef(i, result.vector[i], result.start[i], adjustedDownLen);
+              }
+            }
+          }
+        }
+      }
+      return result;
+    }
   }
 
   private static class StructTreeReader extends TreeReader {

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java Fri Sep  5 19:15:44 2014
@@ -156,6 +156,10 @@ public class Vectorizer implements Physi
     // The regex matches only the "decimal" prefix of the type.
     patternBuilder.append("|decimal.*");
 
+    // CHAR and VARCHAR types can be specified with maximum length.
+    patternBuilder.append("|char.*");
+    patternBuilder.append("|varchar.*");
+
     supportedDataTypesPattern = Pattern.compile(patternBuilder.toString());
 
     supportedGenericUDFs.add(GenericUDFOPPlus.class);
@@ -248,6 +252,8 @@ public class Vectorizer implements Physi
     supportedGenericUDFs.add(GenericUDFTimestamp.class);
     supportedGenericUDFs.add(GenericUDFToDecimal.class);
     supportedGenericUDFs.add(GenericUDFToDate.class);
+    supportedGenericUDFs.add(GenericUDFToChar.class);
+    supportedGenericUDFs.add(GenericUDFToVarchar.class);
 
     // For conditional expressions
     supportedGenericUDFs.add(GenericUDFIf.class);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java Fri Sep  5 19:15:44 2014
@@ -76,4 +76,9 @@ public class CommandProcessorResponse {
   public String getSQLState() { return SQLState; }
   public Schema getSchema() { return resSchema; }
   public Throwable getException() { return exception; }
+  public String toString() {
+    return "(" + responseCode + "," + errorMessage + "," + SQLState + 
+      (resSchema == null ? "" : ",") +
+      (exception == null ? "" : exception.getMessage()) + ")";
+  }
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Fri Sep  5 19:15:44 2014
@@ -53,6 +53,9 @@ import org.apache.hadoop.hive.ql.exec.te
 import org.apache.hadoop.hive.ql.history.HiveHistory;
 import org.apache.hadoop.hive.ql.history.HiveHistoryImpl;
 import org.apache.hadoop.hive.ql.history.HiveHistoryProxyHandler;
+import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
+import org.apache.hadoop.hive.ql.lockmgr.TxnManagerFactory;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -211,6 +214,29 @@ public class SessionState {
   private String hdfsScratchDirURIString;
 
   /**
+   * Transaction manager to use for this session.  This is instantiated lazily by
+   * {@link #initTxnMgr(org.apache.hadoop.hive.conf.HiveConf)}
+   */
+  private HiveTxnManager txnMgr = null;
+
+  /**
+   * When {@link #setCurrentTxn(long)} is set to this or {@link #getCurrentTxn()} returns this it
+   * indicates that there is not a current transaction in this session.
+  */
+  public static final long NO_CURRENT_TXN = -1L;
+
+  /**
+   * Transaction currently open
+   */
+  private long currentTxn = NO_CURRENT_TXN;
+
+  /**
+   * Whether we are in auto-commit state or not.  Currently we are always in auto-commit,
+   * so there are no setters for this yet.
+   */
+  private boolean txnAutoCommit = true;
+
+  /**
    * Get the lineage state stored in this session.
    *
    * @return LineageState
@@ -312,6 +338,37 @@ public class SessionState {
   }
 
   /**
+   * Initialize the transaction manager.  This is done lazily to avoid hard wiring one
+   * transaction manager at the beginning of the session.  In general users shouldn't change
+   * this, but it's useful for testing.
+   * @param conf Hive configuration to initialize transaction manager
+   * @return transaction manager
+   * @throws LockException
+   */
+  public HiveTxnManager initTxnMgr(HiveConf conf) throws LockException {
+    if (txnMgr == null) {
+      txnMgr = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
+    }
+    return txnMgr;
+  }
+
+  public HiveTxnManager getTxnMgr() {
+    return txnMgr;
+  }
+
+  public long getCurrentTxn() {
+    return currentTxn;
+  }
+
+  public void setCurrentTxn(long currTxn) {
+    currentTxn = currTxn;
+  }
+
+  public boolean isAutoCommit() {
+    return txnAutoCommit;
+  }
+
+  /**
    * Singleton Session object per thread.
    *
    **/
@@ -1100,6 +1157,7 @@ public class SessionState {
   }
 
   public void close() throws IOException {
+    if (txnMgr != null) txnMgr.closeTxnManager();
     JavaUtils.closeClassLoadersTo(conf.getClassLoader(), parentLoader);
     File resourceDir =
         new File(getConf().getVar(HiveConf.ConfVars.DOWNLOADED_RESOURCES_DIR));

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Fri Sep  5 19:15:44 2014
@@ -41,8 +41,10 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
 import org.apache.hadoop.hive.ql.plan.Statistics;
@@ -975,6 +977,22 @@ public class StatsUtils {
       colName = ennd.getName();
       colType = "null";
       numNulls = numRows;
+    } else if (end instanceof ExprNodeColumnListDesc) {
+
+      // column list
+      ExprNodeColumnListDesc encd = (ExprNodeColumnListDesc) end;
+      colName = Joiner.on(",").join(encd.getCols());
+      colType = "array";
+      countDistincts = numRows;
+      oi = encd.getWritableObjectInspector();
+    } else if (end instanceof ExprNodeFieldDesc) {
+
+      // field within complex type
+      ExprNodeFieldDesc enfd = (ExprNodeFieldDesc) end;
+      colName = enfd.getFieldName();
+      colType = enfd.getTypeString();
+      countDistincts = numRows;
+      oi = enfd.getWritableObjectInspector();
     }
 
     if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorMR.java Fri Sep  5 19:15:44 2014
@@ -100,7 +100,7 @@ public class CompactorMR {
    * @throws java.io.IOException if the job fails
    */
   void run(HiveConf conf, String jobName, Table t, StorageDescriptor sd,
-           ValidTxnList txns, boolean isMajor) throws IOException {
+           ValidTxnList txns, boolean isMajor, Worker.StatsUpdater su) throws IOException {
     JobConf job = new JobConf(conf);
     job.setJobName(jobName);
     job.setOutputKeyClass(NullWritable.class);
@@ -182,6 +182,7 @@ public class CompactorMR {
     LOG.debug("Setting maximume transaction to " + maxTxn);
 
     JobClient.runJob(job).waitForCompletion();
+    su.gatherStats();
   }
 
   /**

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java Fri Sep  5 19:15:44 2014
@@ -20,20 +20,28 @@ package org.apache.hadoop.hive.ql.txn.co
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.ValidTxnList;
-import org.apache.hadoop.hive.metastore.api.CompactionType;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.TxnHandler;
-import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
+import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.security.PrivilegedExceptionAction;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 
 /**
  * A class to do compactions.  This will run in a separate thread.  It will spin on the
@@ -110,7 +118,7 @@ public class Worker extends CompactorThr
           continue;
         }
 
-        final boolean isMajor = (ci.type == CompactionType.MAJOR);
+        final boolean isMajor = ci.isMajorCompaction();
         final ValidTxnList txns =
             TxnHandler.createValidTxnList(txnHandler.getOpenTxns());
         final StringBuffer jobName = new StringBuffer(name);
@@ -129,17 +137,19 @@ public class Worker extends CompactorThr
         LOG.info("Starting " + ci.type.toString() + " compaction for " +
             ci.getFullPartitionName());
 
+        final StatsUpdater su = StatsUpdater.init(ci, txnHandler.findColumnsWithStats(ci), conf,
+          runJobAsSelf(runAs) ? runAs : t.getOwner());
         final CompactorMR mr = new CompactorMR();
         try {
           if (runJobAsSelf(runAs)) {
-            mr.run(conf, jobName.toString(), t, sd, txns, isMajor);
+            mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su);
           } else {
             UserGroupInformation ugi = UserGroupInformation.createProxyUser(t.getOwner(),
               UserGroupInformation.getLoginUser());
             ugi.doAs(new PrivilegedExceptionAction<Object>() {
               @Override
               public Object run() throws Exception {
-                mr.run(conf, jobName.toString(), t, sd, txns, isMajor);
+                mr.run(conf, jobName.toString(), t, sd, txns, isMajor, su);
                 return null;
               }
             });
@@ -161,11 +171,95 @@ public class Worker extends CompactorThr
   public void init(BooleanPointer stop) throws MetaException {
     super.init(stop);
 
-    StringBuffer name = new StringBuffer(hostname());
+    StringBuilder name = new StringBuilder(hostname());
     name.append("-");
     name.append(getId());
     this.name = name.toString();
     setName(name.toString());
   }
 
+  static final class StatsUpdater {
+    static final private Log LOG = LogFactory.getLog(StatsUpdater.class);
+
+    public static StatsUpdater init(CompactionInfo ci, List<String> columnListForStats,
+                                     HiveConf conf, String userName) {
+      return new StatsUpdater(ci, columnListForStats, conf, userName);
+    }
+    /**
+     * List of columns for which to compute stats.  This may be empty, which means no stats gathering
+     * is needed.
+     */
+    private final List<String> columnList;
+    private final HiveConf conf;
+    private final String userName;
+    private final CompactionInfo ci;
+      
+    private StatsUpdater(CompactionInfo ci, List<String> columnListForStats,
+                         HiveConf conf, String userName) {
+      this.conf = conf;
+      this.userName = userName;
+      this.ci = ci;
+      if(!ci.isMajorCompaction() || columnListForStats == null || columnListForStats.isEmpty()) {
+        columnList = Collections.emptyList();
+        return;
+      }
+      columnList = columnListForStats;
+    }
+
+    /**
+     * todo: what should this do on failure?  Should it rethrow? Invalidate stats?
+     */
+    void gatherStats() throws IOException {
+      if(!ci.isMajorCompaction()) {
+        return;
+      }
+      if(columnList.isEmpty()) {
+        LOG.debug("No existing stats for " + ci.dbname + "." + ci.tableName + " found.  Will not run analyze.");
+        return;//nothing to do
+      }
+      //e.g. analyze table page_view partition(dt='10/15/2014',country='US')
+      // compute statistics for columns viewtime
+      StringBuilder sb = new StringBuilder("analyze table ").append(ci.dbname).append(".").append(ci.tableName);
+      if(ci.partName != null) {
+        try {
+          sb.append(" partition(");
+          Map<String, String> partitionColumnValues = Warehouse.makeEscSpecFromName(ci.partName);
+          for(Map.Entry<String, String> ent : partitionColumnValues.entrySet()) {
+            sb.append(ent.getKey()).append("='").append(ent.getValue()).append("'");
+          }
+          sb.append(")");
+        }
+        catch(MetaException ex) {
+          throw new IOException(ex);
+        }
+      }
+      sb.append(" compute statistics for columns ");
+      for(String colName : columnList) {
+        sb.append(colName).append(",");
+      }
+      sb.setLength(sb.length() - 1);//remove trailing ,
+      LOG.debug("running '" + sb.toString() + "'");
+      Driver d = new Driver(conf, userName);
+      SessionState localSession = null;
+      if(SessionState.get() == null) {
+         localSession = SessionState.start(new SessionState(conf));
+      }
+      try {
+        CommandProcessorResponse cpr = d.run(sb.toString());
+        if (cpr.getResponseCode() != 0) {
+          throw new IOException("Could not update stats for table " + ci.getFullTableName() +
+            (ci.partName == null ? "" : "/" + ci.partName) + " due to: " + cpr);
+        }
+      }
+      catch(CommandNeedRetryException cnre) {
+        throw new IOException("Could not update stats for table " + ci.getFullTableName() +
+          (ci.partName == null ? "" : "/" + ci.partName) + " due to: " + cnre.getMessage());
+      }
+      finally {
+        if(localSession != null) {
+          localSession.close();
+        }
+      }
+    }
+  }
 }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java Fri Sep  5 19:15:44 2014
@@ -25,6 +25,8 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToBoolean;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToBooleanViaDoubleToLong;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastLongToBooleanViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDateToBooleanViaLongToLong;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastTimestampToBooleanViaLongToLong;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -43,6 +45,7 @@ import org.apache.hadoop.io.Text;
  *
  */
 @VectorizedExpressions({CastLongToBooleanViaLongToLong.class,
+  CastDateToBooleanViaLongToLong.class, CastTimestampToBooleanViaLongToLong.class,
   CastDoubleToBooleanViaDoubleToLong.class, CastDecimalToBoolean.class})
 public class UDFToBoolean extends UDF {
   private final BooleanWritable booleanWritable = new BooleanWritable();

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcat.java Fri Sep  5 19:15:44 2014
@@ -23,9 +23,13 @@ import org.apache.hadoop.hive.common.typ
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatColCol;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatColScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.StringConcatScalarCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringGroupConcatColCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringGroupColConcatStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringGroupColConcatCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringGroupColConcatVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.StringScalarConcatStringGroupCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.CharScalarConcatStringGroupCol;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.VarCharScalarConcatStringGroupCol;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -49,8 +53,11 @@ extended = "Returns NULL if any argument
 + "Example:\n"
 + "  > SELECT _FUNC_('abc', 'def') FROM src LIMIT 1;\n"
 + "  'abcdef'")
-@VectorizedExpressions({StringConcatColCol.class, StringConcatColScalar.class,
-    StringConcatScalarCol.class})
+@VectorizedExpressions({StringGroupConcatColCol.class,
+    StringGroupColConcatStringScalar.class,
+    StringGroupColConcatCharScalar.class, StringGroupColConcatVarCharScalar.class,
+    StringScalarConcatStringGroupCol.class,
+    CharScalarConcatStringGroupCol.class, VarCharScalarConcatStringGroupCol.class})
 public class GenericUDFConcat extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;
   private transient StringConverter[] stringConverters;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFIf.java Fri Sep  5 19:15:44 2014
@@ -41,10 +41,20 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarLongScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprLongScalarDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringScalar;
+
+
 
 /**
  * IF(expr1,expr2,expr3) <br>
@@ -60,8 +70,14 @@ import org.apache.hadoop.hive.ql.exec.ve
   IfExprLongScalarDoubleColumn.class, IfExprDoubleScalarLongColumn.class,
   IfExprLongScalarLongScalar.class, IfExprDoubleScalarDoubleScalar.class,
   IfExprLongScalarDoubleScalar.class, IfExprDoubleScalarLongScalar.class,
-  IfExprStringColumnStringColumn.class, IfExprStringColumnStringScalar.class,
-  IfExprStringScalarStringColumn.class, IfExprStringScalarStringScalar.class
+  IfExprStringGroupColumnStringGroupColumn.class,
+  IfExprStringGroupColumnStringScalar.class,
+  IfExprStringGroupColumnCharScalar.class, IfExprStringGroupColumnVarCharScalar.class,
+  IfExprStringScalarStringGroupColumn.class,
+  IfExprCharScalarStringGroupColumn.class, IfExprVarCharScalarStringGroupColumn.class,
+  IfExprStringScalarStringScalar.class,
+  IfExprStringScalarCharScalar.class, IfExprStringScalarVarCharScalar.class,
+  IfExprCharScalarStringScalar.class, IfExprVarCharScalarStringScalar.class,
 })
 public class GenericUDFIf extends GenericUDF {
   private transient ObjectInspector[] argumentOIs;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqual.java Fri Sep  5 19:15:44 2014
@@ -34,9 +34,14 @@ import org.apache.hadoop.hive.serde2.obj
   DoubleColEqualLongScalar.class, DoubleColEqualDoubleScalar.class,
   LongScalarEqualLongColumn.class, LongScalarEqualDoubleColumn.class,
   DoubleScalarEqualLongColumn.class, DoubleScalarEqualDoubleColumn.class,
-  StringColEqualStringColumn.class, StringColEqualStringScalar.class,
-  StringScalarEqualStringColumn.class, FilterStringColEqualStringColumn.class,
-  FilterStringColEqualStringScalar.class, FilterStringScalarEqualStringColumn.class,
+  StringGroupColEqualStringGroupColumn.class, FilterStringGroupColEqualStringGroupColumn.class,
+  StringGroupColEqualStringScalar.class,
+  StringGroupColEqualVarCharScalar.class, StringGroupColEqualCharScalar.class,
+  StringScalarEqualStringGroupColumn.class,
+  VarCharScalarEqualStringGroupColumn.class, CharScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualStringScalar.class, FilterStringScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualVarCharScalar.class, FilterVarCharScalarEqualStringGroupColumn.class,
+  FilterStringGroupColEqualCharScalar.class, FilterCharScalarEqualStringGroupColumn.class,
   FilterLongColEqualLongColumn.class, FilterLongColEqualDoubleColumn.class,
   FilterDoubleColEqualLongColumn.class, FilterDoubleColEqualDoubleColumn.class,
   FilterLongColEqualLongScalar.class, FilterLongColEqualDoubleScalar.class,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java Fri Sep  5 19:15:44 2014
@@ -35,9 +35,14 @@ import org.apache.hadoop.io.Text;
   DoubleColGreaterEqualLongScalar.class, DoubleColGreaterEqualDoubleScalar.class,
   LongScalarGreaterEqualLongColumn.class, LongScalarGreaterEqualDoubleColumn.class,
   DoubleScalarGreaterEqualLongColumn.class, DoubleScalarGreaterEqualDoubleColumn.class,
-  StringColGreaterEqualStringColumn.class, StringColGreaterEqualStringScalar.class,
-  StringScalarGreaterEqualStringColumn.class, FilterStringColGreaterEqualStringColumn.class,
-  FilterStringColGreaterEqualStringScalar.class, FilterStringScalarGreaterEqualStringColumn.class,
+  StringGroupColGreaterEqualStringGroupColumn.class, FilterStringGroupColGreaterEqualStringGroupColumn.class,
+  StringGroupColGreaterEqualStringScalar.class,
+  StringGroupColGreaterEqualVarCharScalar.class, StringGroupColGreaterEqualCharScalar.class,
+  StringScalarGreaterEqualStringGroupColumn.class,
+  VarCharScalarGreaterEqualStringGroupColumn.class, CharScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualStringScalar.class, FilterStringScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualVarCharScalar.class, FilterVarCharScalarGreaterEqualStringGroupColumn.class,
+  FilterStringGroupColGreaterEqualCharScalar.class, FilterCharScalarGreaterEqualStringGroupColumn.class,
   FilterLongColGreaterEqualLongColumn.class, FilterLongColGreaterEqualDoubleColumn.class,
   FilterDoubleColGreaterEqualLongColumn.class, FilterDoubleColGreaterEqualDoubleColumn.class,
   FilterLongColGreaterEqualLongScalar.class, FilterLongColGreaterEqualDoubleScalar.class,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java Fri Sep  5 19:15:44 2014
@@ -35,9 +35,14 @@ import org.apache.hadoop.io.Text;
   DoubleColLessEqualLongScalar.class, DoubleColLessEqualDoubleScalar.class,
   LongScalarLessEqualLongColumn.class, LongScalarLessEqualDoubleColumn.class,
   DoubleScalarLessEqualLongColumn.class, DoubleScalarLessEqualDoubleColumn.class,
-  StringColLessEqualStringColumn.class, StringColLessEqualStringScalar.class,
-  StringScalarLessEqualStringColumn.class, FilterStringColLessEqualStringColumn.class,
-  FilterStringColLessEqualStringScalar.class, FilterStringScalarLessEqualStringColumn.class,
+  StringGroupColLessEqualStringGroupColumn.class, FilterStringGroupColLessEqualStringGroupColumn.class,
+  StringGroupColLessEqualStringScalar.class,
+  StringGroupColLessEqualVarCharScalar.class, StringGroupColLessEqualCharScalar.class,
+  StringScalarLessEqualStringGroupColumn.class,
+  VarCharScalarLessEqualStringGroupColumn.class, CharScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualStringScalar.class, FilterStringScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualVarCharScalar.class, FilterVarCharScalarLessEqualStringGroupColumn.class,
+  FilterStringGroupColLessEqualCharScalar.class, FilterCharScalarLessEqualStringGroupColumn.class,
   FilterLongColLessEqualLongColumn.class, FilterLongColLessEqualDoubleColumn.class,
   FilterDoubleColLessEqualLongColumn.class, FilterDoubleColLessEqualDoubleColumn.class,
   FilterLongColLessEqualLongScalar.class, FilterLongColLessEqualDoubleScalar.class,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java Fri Sep  5 19:15:44 2014
@@ -35,9 +35,14 @@ import org.apache.hadoop.io.Text;
   DoubleColGreaterLongScalar.class, DoubleColGreaterDoubleScalar.class,
   LongScalarGreaterLongColumn.class, LongScalarGreaterDoubleColumn.class,
   DoubleScalarGreaterLongColumn.class, DoubleScalarGreaterDoubleColumn.class,
-  StringColGreaterStringColumn.class, StringColGreaterStringScalar.class,
-  StringScalarGreaterStringColumn.class, FilterStringColGreaterStringColumn.class,
-  FilterStringColGreaterStringScalar.class, FilterStringScalarGreaterStringColumn.class,
+  StringGroupColGreaterStringGroupColumn.class, FilterStringGroupColGreaterStringGroupColumn.class,
+  StringGroupColGreaterStringScalar.class,
+  StringGroupColGreaterVarCharScalar.class, StringGroupColGreaterCharScalar.class,
+  StringScalarGreaterStringGroupColumn.class,
+  VarCharScalarGreaterStringGroupColumn.class, CharScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterStringScalar.class, FilterStringScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterVarCharScalar.class, FilterVarCharScalarGreaterStringGroupColumn.class,
+  FilterStringGroupColGreaterCharScalar.class, FilterCharScalarGreaterStringGroupColumn.class,
   FilterLongColGreaterLongColumn.class, FilterLongColGreaterDoubleColumn.class,
   FilterDoubleColGreaterLongColumn.class, FilterDoubleColGreaterDoubleColumn.class,
   FilterLongColGreaterLongScalar.class, FilterLongColGreaterDoubleScalar.class,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java Fri Sep  5 19:15:44 2014
@@ -35,9 +35,14 @@ import org.apache.hadoop.io.Text;
     DoubleColLessLongScalar.class, DoubleColLessDoubleScalar.class,
     LongScalarLessLongColumn.class, LongScalarLessDoubleColumn.class,
     DoubleScalarLessLongColumn.class, DoubleScalarLessDoubleColumn.class,
-    StringColLessStringColumn.class, StringColLessStringScalar.class,
-    StringScalarLessStringColumn.class, FilterStringColLessStringColumn.class,
-    FilterStringColLessStringScalar.class, FilterStringScalarLessStringColumn.class,
+    StringGroupColLessStringGroupColumn.class, FilterStringGroupColLessStringGroupColumn.class,
+    StringGroupColLessStringScalar.class,
+    StringGroupColLessVarCharScalar.class, StringGroupColLessCharScalar.class,
+    StringScalarLessStringGroupColumn.class,
+    VarCharScalarLessStringGroupColumn.class, CharScalarLessStringGroupColumn.class,
+    FilterStringGroupColLessStringScalar.class, FilterStringScalarLessStringGroupColumn.class,
+    FilterStringGroupColLessVarCharScalar.class, FilterVarCharScalarLessStringGroupColumn.class,
+    FilterStringGroupColLessCharScalar.class, FilterCharScalarLessStringGroupColumn.class,
     FilterLongColLessLongColumn.class, FilterLongColLessDoubleColumn.class,
     FilterDoubleColLessLongColumn.class, FilterDoubleColLessDoubleColumn.class,
     FilterLongColLessLongScalar.class, FilterLongColLessDoubleScalar.class,

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPNotEqual.java Fri Sep  5 19:15:44 2014
@@ -34,9 +34,14 @@ import org.apache.hadoop.hive.serde2.obj
   DoubleColNotEqualLongScalar.class, DoubleColNotEqualDoubleScalar.class,
   LongScalarNotEqualLongColumn.class, LongScalarNotEqualDoubleColumn.class,
   DoubleScalarNotEqualLongColumn.class, DoubleScalarNotEqualDoubleColumn.class,
-  StringColNotEqualStringColumn.class, StringColNotEqualStringScalar.class,
-  StringScalarNotEqualStringColumn.class, FilterStringColNotEqualStringColumn.class,
-  FilterStringColNotEqualStringScalar.class, FilterStringScalarNotEqualStringColumn.class,
+  StringGroupColNotEqualStringGroupColumn.class, FilterStringGroupColNotEqualStringGroupColumn.class,
+  StringGroupColNotEqualStringScalar.class,
+  StringGroupColNotEqualVarCharScalar.class, StringGroupColNotEqualCharScalar.class,
+  StringScalarNotEqualStringGroupColumn.class,
+  VarCharScalarNotEqualStringGroupColumn.class, CharScalarNotEqualStringGroupColumn.class, 
+  FilterStringGroupColNotEqualStringScalar.class, FilterStringScalarNotEqualStringGroupColumn.class,
+  FilterStringGroupColNotEqualVarCharScalar.class, FilterVarCharScalarNotEqualStringGroupColumn.class,
+  FilterStringGroupColNotEqualCharScalar.class, FilterCharScalarNotEqualStringGroupColumn.class,
   FilterLongColNotEqualLongColumn.class, FilterLongColNotEqualDoubleColumn.class,
   FilterDoubleColNotEqualLongColumn.class, FilterDoubleColNotEqualDoubleColumn.class,
   FilterLongColNotEqualLongScalar.class, FilterLongColNotEqualDoubleScalar.class,

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java Fri Sep  5 19:15:44 2014
@@ -29,7 +29,9 @@ import java.util.Map;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColAndCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.ColOrCol;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.DoubleColumnInList;
@@ -38,10 +40,14 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncLogWithBaseLongToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.FuncPowerDoubleToDouble;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprCharScalarStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnVarCharScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprVarCharScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNotNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IsNull;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.LongColumnInList;
@@ -80,10 +86,14 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongColumnNotBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterLongScalarGreaterLongColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColGreaterStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColGreaterStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringGroupColGreaterStringScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterStringColumnNotBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterVarCharColumnBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterVarCharColumnNotBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterCharColumnBetween;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FilterCharColumnNotBetween;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncLnDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncRoundDoubleToDouble;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.FuncSinDoubleToDouble;
@@ -128,6 +138,8 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToDecimal;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFTimestamp;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Test;
@@ -139,39 +151,39 @@ public class TestVectorizationContext {
     VectorUDFUnixTimeStampLong v1 = new VectorUDFUnixTimeStampLong();
     VectorExpressionDescriptor.Builder builder1 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d1 = builder1.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.LONG)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_DATETIME_FAMILY)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
-    Assert.assertEquals(d1, v1.getDescriptor());
+    assertTrue(d1.matches(v1.getDescriptor()));
 
     VectorExpressionDescriptor.Builder builder2 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d2 = builder2.setMode(VectorExpressionDescriptor.Mode.FILTER)
-        .setNumArguments(2).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.LONG,
-            VectorExpressionDescriptor.ArgumentType.DOUBLE).setInputExpressionTypes(
+        .setNumArguments(2).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.INT_FAMILY,
+            VectorExpressionDescriptor.ArgumentType.FLOAT_FAMILY).setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,
             VectorExpressionDescriptor.InputExpressionType.SCALAR).build();
     FilterLongColLessDoubleScalar v2 = new FilterLongColLessDoubleScalar();
-    Assert.assertEquals(d2, v2.getDescriptor());
+    assertTrue(d2.matches(v2.getDescriptor()));
 
     VectorExpressionDescriptor.Builder builder3 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d3 = builder3.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.STRING)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     StringLower v3 = new StringLower();
-    Assert.assertEquals(d3, v3.getDescriptor());
+    assertTrue(d3.matches(v3.getDescriptor()));
 
     VectorExpressionDescriptor.Builder builder4 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d4 = builder4.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
-        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.ANY)
+        .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.ALL_FAMILY)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     StringUpper v4 = new StringUpper();
-    Assert.assertEquals(d4, v4.getDescriptor());
+    assertTrue(d4.matches(v4.getDescriptor()));
 
     VectorExpressionDescriptor.Builder builder5 = new VectorExpressionDescriptor.Builder();
     VectorExpressionDescriptor.Descriptor d5 = builder5.setMode(VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(1).setArgumentTypes(VectorExpressionDescriptor.ArgumentType.STRING)
         .setInputExpressionTypes(VectorExpressionDescriptor.InputExpressionType.COLUMN).build();
     IsNull v5 = new IsNull();
-    Assert.assertEquals(d5, v5.getDescriptor());
+    assertTrue(d5.matches(v5.getDescriptor()));
   }
 
   @Test
@@ -289,11 +301,12 @@ public class TestVectorizationContext {
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
-    assertTrue(ve instanceof FilterStringColGreaterStringScalar);
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringScalar);
   }
 
   @Test
   public void testFilterStringColCompareStringColumnExpressions() throws HiveException {
+    // Strings test
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(String.class, "col1", "table", false);
     ExprNodeColumnDesc col2Expr = new  ExprNodeColumnDesc(String.class, "col2", "table", false);
 
@@ -313,7 +326,97 @@ public class TestVectorizationContext {
 
     VectorExpression ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
 
-    assertTrue(ve instanceof FilterStringColGreaterStringColumn);
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // 2 CHAR test
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    col1Expr = new  ExprNodeColumnDesc(charTypeInfo, "col1", "table", false);
+    col2Expr = new  ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // 2 VARCHAR test
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    col1Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    // Some mix tests (STRING, CHAR), (VARCHAR, CHAR), (VARCHAR, STRING)...
+    col1Expr = new  ExprNodeColumnDesc(String.class, "col1", "table", false);
+    col2Expr = new  ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    col1Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new  ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
+
+    col1Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    col2Expr = new  ExprNodeColumnDesc(String.class, "col2", "table", false);
+
+    udf = new GenericUDFOPGreaterThan();
+    exprDesc = new ExprNodeGenericFuncDesc();
+    exprDesc.setGenericUDF(udf);
+    children1 = new ArrayList<ExprNodeDesc>(2);
+    children1.add(col1Expr);
+    children1.add(col2Expr);
+    exprDesc.setChildren(children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+
+    assertTrue(ve instanceof FilterStringGroupColGreaterStringGroupColumn);
   }
 
   @Test
@@ -908,6 +1011,7 @@ public class TestVectorizationContext {
 
   @Test
   public void testBetweenFilters() throws HiveException {
+    // string tests
     ExprNodeColumnDesc col1Expr = new  ExprNodeColumnDesc(String.class, "col1", "table", false);
     ExprNodeConstantDesc constDesc = new ExprNodeConstantDesc("Alpha");
     ExprNodeConstantDesc constDesc2 = new ExprNodeConstantDesc("Bravo");
@@ -934,6 +1038,56 @@ public class TestVectorizationContext {
     ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
     assertTrue(ve instanceof FilterStringColumnNotBetween);
 
+    // CHAR tests
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    col1Expr = new  ExprNodeColumnDesc(charTypeInfo, "col1", "table", false);
+    constDesc = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
+    constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
+
+    // CHAR BETWEEN
+    udf = new GenericUDFBetween();
+    children1 = new ArrayList<ExprNodeDesc>();
+    children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    children1.add(constDesc2);
+    exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
+        children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterCharColumnBetween);
+
+    // CHAR NOT BETWEEN
+    children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterCharColumnNotBetween);
+
+    // VARCHAR tests
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    col1Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false);
+    constDesc = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
+    constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
+
+    // VARCHAR BETWEEN
+    udf = new GenericUDFBetween();
+    children1 = new ArrayList<ExprNodeDesc>();
+    children1.add(new ExprNodeConstantDesc(new Boolean(false))); // no NOT keyword
+    children1.add(col1Expr);
+    children1.add(constDesc);
+    children1.add(constDesc2);
+    exprDesc = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, udf,
+        children1);
+
+    vc = new VectorizationContext(columnMap, 2);
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterVarCharColumnBetween);
+
+    // VARCHAR NOT BETWEEN
+    children1.set(0, new ExprNodeConstantDesc(new Boolean(true))); // has NOT keyword
+    ve = vc.getVectorExpression(exprDesc, VectorExpressionDescriptor.Mode.FILTER);
+    assertTrue(ve instanceof FilterVarCharColumnNotBetween);
+
     // long BETWEEN
     children1.set(0, new ExprNodeConstantDesc(new Boolean(false)));
     children1.set(1, new ExprNodeColumnDesc(Long.class, "col1", "table", false));
@@ -1173,12 +1327,12 @@ public class TestVectorizationContext {
     children1.set(1, col2Expr);
     children1.set(2, col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringColumnStringColumn);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
 
     // column/scalar
     children1.set(2,  constDesc3);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringColumnStringScalar);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringScalar);
 
     // scalar/scalar
     children1.set(1,  constDesc2);
@@ -1188,7 +1342,62 @@ public class TestVectorizationContext {
     // scalar/column
     children1.set(2,  col3Expr);
     ve = vc.getVectorExpression(exprDesc);
-    assertTrue(ve instanceof IfExprStringScalarStringColumn);
-  }  
+    assertTrue(ve instanceof IfExprStringScalarStringGroupColumn);
+
+    // test for CHAR type
+    CharTypeInfo charTypeInfo = new CharTypeInfo(10);
+    constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10));
+    constDesc3 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10));
+    col2Expr = new  ExprNodeColumnDesc(charTypeInfo, "col2", "table", false);
+    col3Expr = new  ExprNodeColumnDesc(charTypeInfo, "col3", "table", false);
+
+    // column/column
+    children1.set(1, col2Expr);
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
+
+    // column/scalar
+    children1.set(2,  constDesc3);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnCharScalar);
+
+    // scalar/scalar
+    children1.set(1,  constDesc2);
+//    ve = vc.getVectorExpression(exprDesc);
+//    assertTrue(ve instanceof IfExprCharScalarCharScalar);
+
+    // scalar/column
+    children1.set(2,  col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprCharScalarStringGroupColumn);
+ 
+    // test for VARCHAR type
+    VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10);
+    constDesc2 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
+    constDesc3 = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Bravo", 10));
+    col2Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col2", "table", false);
+    col3Expr = new  ExprNodeColumnDesc(varcharTypeInfo, "col3", "table", false);
+
+    // column/column
+    children1.set(1, col2Expr);
+    children1.set(2, col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnStringGroupColumn);
+
+    // column/scalar
+    children1.set(2,  constDesc3);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprStringGroupColumnVarCharScalar);
 
+    // scalar/scalar
+    children1.set(1,  constDesc2);
+//    ve = vc.getVectorExpression(exprDesc);
+//    assertTrue(ve instanceof IfExprVarCharScalarVarCharScalar);
+
+    // scalar/column
+    children1.set(2,  col3Expr);
+    ve = vc.getVectorExpression(exprDesc);
+    assertTrue(ve instanceof IfExprVarCharScalarStringGroupColumn);
+  }
 }

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorConditionalExpressions.java Fri Sep  5 19:15:44 2014
@@ -32,10 +32,10 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleScalar;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleScalarDoubleColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.IfExprDoubleColumnDoubleScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringColumn;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringGroupColumn;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringGroupColumnStringScalar;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringGroupColumn;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringScalar;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IfExprStringScalarStringColumn;
 
 import org.junit.Test;
 
@@ -388,7 +388,7 @@ public class TestVectorConditionalExpres
   @Test
   public void testIfExprStringColumnStringColumn() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
-    VectorExpression expr = new IfExprStringColumnStringColumn(0, 1, 2, 3);
+    VectorExpression expr = new IfExprStringGroupColumnStringGroupColumn(0, 1, 2, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("arg3_0"));
@@ -474,7 +474,7 @@ public class TestVectorConditionalExpres
   public void testIfExprStringColumnStringScalar() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
     byte[] scalar = getUTF8Bytes("scalar");
-    VectorExpression expr = new IfExprStringColumnStringScalar(0, 1, scalar, 3);
+    VectorExpression expr = new IfExprStringGroupColumnStringScalar(0, 1, scalar, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("scalar"));
@@ -498,7 +498,7 @@ public class TestVectorConditionalExpres
   public void testIfExprStringScalarStringColumn() {
     VectorizedRowBatch batch = getBatch1Long3BytesVectors();
     byte[] scalar = getUTF8Bytes("scalar");
-    VectorExpression expr = new IfExprStringScalarStringColumn(0,scalar, 2, 3);
+    VectorExpression expr = new IfExprStringScalarStringGroupColumn(0,scalar, 2, 3);
     BytesColumnVector r = (BytesColumnVector) batch.cols[3];
     expr.evaluate(batch);
     assertTrue(getString(r, 0).equals("arg3_0"));

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java?rev=1622763&r1=1622762&r2=1622763&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorGenericDateExpressions.java Fri Sep  5 19:15:44 2014
@@ -27,7 +27,6 @@ import org.apache.hadoop.hive.serde2.io.
 import org.junit.Assert;
 import org.junit.Test;
 
-
 import java.io.UnsupportedEncodingException;
 import java.sql.Date;
 import java.text.SimpleDateFormat;
@@ -136,9 +135,12 @@ public class TestVectorGenericDateExpres
         return toTimestamp(date);
 
       case STRING:
+      case CHAR:
+      case VARCHAR:
         return toString(date);
+      default:
+        throw new Error("Unsupported input type " + type.name());
     }
-    return null;
   }
 
   private void testDateAddColScalar(VectorExpression.Type colType1, boolean isPositive) {
@@ -214,8 +216,12 @@ public class TestVectorGenericDateExpres
           udf = new VectorUDFDateAddScalarCol(toTimestamp(scalar1), 0, 1);
           break;
         case STRING:
+        case CHAR:
+        case VARCHAR:
           udf = new VectorUDFDateAddScalarCol(toString(scalar1), 0, 1);
           break;
+        default:
+          throw new Error("Invalid input type: " + colType1.name());
       }
     } else {
       switch (colType1) {
@@ -226,8 +232,12 @@ public class TestVectorGenericDateExpres
           udf = new VectorUDFDateSubScalarCol(toTimestamp(scalar1), 0, 1);
           break;
         case STRING:
+        case CHAR:
+        case VARCHAR:
           udf = new VectorUDFDateSubScalarCol(toString(scalar1), 0, 1);
           break;
+        default:
+          throw new Error("Invalid input type: " + colType1.name());
       }
     }
     udf.setInputTypes(colType1, VectorExpression.Type.OTHER);
@@ -694,7 +704,9 @@ public class TestVectorGenericDateExpres
 
   private void validateToDate(VectorizedRowBatch batch, VectorExpression.Type colType, LongColumnVector date) {
     VectorExpression udf;
-    if (colType == VectorExpression.Type.STRING) {
+    if (colType == VectorExpression.Type.STRING ||
+        colType == VectorExpression.Type.CHAR ||
+        colType == VectorExpression.Type.VARCHAR) {
       udf = new CastStringToDate(0, 1);
     } else {
       udf = new CastLongToDate(0, 1);