Posted to commits@hive.apache.org by mm...@apache.org on 2017/07/21 07:20:39 UTC

hive git commit: HIVE-17116: Vectorization: Add infrastructure for vectorization of ROW__ID struct (Matt McCline, reviewed by Teddy Choi) try #2

Repository: hive
Updated Branches:
  refs/heads/master aa5e9bfab -> f6b61d600


HIVE-17116: Vectorization: Add infrastructure for vectorization of ROW__ID struct (Matt McCline, reviewed by Teddy Choi) try #2


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f6b61d60
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f6b61d60
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f6b61d60

Branch: refs/heads/master
Commit: f6b61d6001bb0913dd08068b7d9f268bf4ca7beb
Parents: aa5e9bf
Author: Matt McCline <mm...@hortonworks.com>
Authored: Fri Jul 21 02:20:16 2017 -0500
Committer: Matt McCline <mm...@hortonworks.com>
Committed: Fri Jul 21 02:20:16 2017 -0500

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   4 +
 .../test/resources/testconfiguration.properties |   1 +
 .../hive/llap/io/api/impl/LlapInputFormat.java  |   4 +-
 .../hive/ql/exec/vector/VectorMapOperator.java  |  41 ++++-
 .../ql/exec/vector/VectorizationContext.java    |   2 +
 .../ql/exec/vector/VectorizedRowBatchCtx.java   |  43 +++--
 .../hadoop/hive/ql/metadata/VirtualColumn.java  |  14 ++
 .../hive/ql/optimizer/physical/Vectorizer.java  | 164 +++++++++++++++----
 8 files changed, 225 insertions(+), 48 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index f360dfa..df45f2c 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2856,6 +2856,10 @@ public class HiveConf extends Configuration {
         "of aggregations that use complex types.\n",
         "For example, AVG uses a complex type (STRUCT) for partial aggregation results" +
         "The default value is true."),
+    HIVE_VECTORIZATION_ROW_IDENTIFIER_ENABLED("hive.vectorized.row.identifier.enabled", false,
+        "This flag should be set to true to enable vectorization\n" +
+        "of ROW__ID.\n" +
+        "The default value is false."),
 
     HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, "This property has been extended to control "
         + "whether to check, convert, and normalize partition value to conform to its column type in "

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index cffe245..f66e19b 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -356,6 +356,7 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   vector_reduce2.q,\
   vector_reduce3.q,\
   vector_reduce_groupby_decimal.q,\
+  vector_row__id.q,\
   vector_string_concat.q,\
   vector_struct_in.q,\
   vector_udf_character_length.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
index 22ca025..79ec4ed 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
@@ -190,8 +190,10 @@ public class LlapInputFormat implements InputFormat<NullWritable, VectorizedRowB
         }
       }
     }
+    // UNDONE: Virtual column support?
     return new VectorizedRowBatchCtx(colNames.toArray(new String[colNames.size()]),
-        colTypes.toArray(new TypeInfo[colTypes.size()]), null, partitionColumnCount, new String[0]);
+        colTypes.toArray(new TypeInfo[colTypes.size()]), null, partitionColumnCount,
+        new VirtualColumn[0], new String[0]);
   }
 
   static TableScanOperator findTsOp(MapWork mapWork) throws HiveException {
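
For context, the extra VirtualColumn[] argument matches the constructor change in VectorizedRowBatchCtx further below. A standalone sketch of the updated signature, with made-up column names and types:

    import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
    import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class BatchCtxSketch {
      public static void main(String[] args) {
        VectorizedRowBatchCtx ctx = new VectorizedRowBatchCtx(
            new String[] {"a", "b"},
            new TypeInfo[] {
                TypeInfoFactory.longTypeInfo, TypeInfoFactory.stringTypeInfo},
            null,                  // dataColumnNums: null means all data columns
            0,                     // partitionColumnCount
            new VirtualColumn[0],  // neededVirtualColumns: none, as in this LLAP path
            new String[0]);        // scratchColumnTypeNames
        System.out.println(ctx.getVirtualColumnCount());  // 0
      }
    }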

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
index ed50df2..1ac8914 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapOperator.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.io.AcidUtils;
 import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
 import org.apache.hadoop.hive.ql.io.orc.OrcStruct;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
@@ -125,6 +126,9 @@ public class VectorMapOperator extends AbstractMapOperator {
   private transient int dataColumnCount;
   private transient int partitionColumnCount;
   private transient Object[] partitionValues;
+  private transient int virtualColumnCount;
+  private transient boolean hasRowIdentifier;
+  private transient int rowIdentifierColumnNum;
 
   private transient boolean[] dataColumnsToIncludeTruncated;
 
@@ -504,6 +508,19 @@ public class VectorMapOperator extends AbstractMapOperator {
     dataColumnCount = batchContext.getDataColumnCount();
     partitionColumnCount = batchContext.getPartitionColumnCount();
     partitionValues = new Object[partitionColumnCount];
+    virtualColumnCount = batchContext.getVirtualColumnCount();
+    rowIdentifierColumnNum = -1;
+    if (virtualColumnCount > 0) {
+      final int firstVirtualColumnNum = dataColumnCount + partitionColumnCount;
+      VirtualColumn[] neededVirtualColumns = batchContext.getNeededVirtualColumns();
+      hasRowIdentifier = (neededVirtualColumns[0] == VirtualColumn.ROWID);
+      if (hasRowIdentifier) {
+        rowIdentifierColumnNum = firstVirtualColumnNum;
+      }
+    } else {
+      hasRowIdentifier = false;
+    }
+
 
     dataColumnNums = batchContext.getDataColumnNums();
     Preconditions.checkState(dataColumnNums != null);
@@ -601,6 +618,13 @@ public class VectorMapOperator extends AbstractMapOperator {
         currentVectorPartContext.partName);
   }
 
+  private void setRowIdentifierToNull(VectorizedRowBatch batch) {
+    ColumnVector rowIdentifierColVector = batch.cols[rowIdentifierColumnNum];
+    rowIdentifierColVector.isNull[0] = true;
+    rowIdentifierColVector.noNulls = false;
+    rowIdentifierColVector.isRepeating = true;
+  }
+
   /*
    * Setup the context for reading from the next partition file.
    */
@@ -695,6 +719,12 @@ public class VectorMapOperator extends AbstractMapOperator {
         batchContext.addPartitionColsToBatch(deserializerBatch, partitionValues);
       }
 
+      if (hasRowIdentifier) {
+
+        // No ACID in this code path -- set ROW__ID to NULL.
+        setRowIdentifierToNull(deserializerBatch);
+      }
+
       /*
        * Set or clear the rest of the reading variables based on {vector|row} deserialization.
        */
@@ -778,7 +808,16 @@ public class VectorMapOperator extends AbstractMapOperator {
            */
           batchCounter++;
           if (value != null) {
-            numRows += ((VectorizedRowBatch) value).size;
+            VectorizedRowBatch batch = (VectorizedRowBatch) value;
+            numRows += batch.size;
+            if (hasRowIdentifier) {
+
+              // UNDONE: Pass ROW__ID STRUCT column through IO Context to get filled in by ACID reader
+              // UNDONE: Or, perhaps tell it to do it before calling us, ...
+              // UNDONE: For now, set column to NULL.
+
+              setRowIdentifierToNull(batch);
+            }
           }
           oneRootOperator.process(value, 0);
           if (oneRootOperator.getDone()) {
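
setRowIdentifierToNull above applies the usual ColumnVector repeating-NULL idiom. A standalone sketch of that idiom (not part of the patch):

    import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
    import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;

    public class RepeatingNullSketch {
      public static void main(String[] args) {
        ColumnVector col = new LongColumnVector(1024);
        // Marking entry 0 null with isRepeating set makes every row in the
        // column NULL without touching the remaining slots.
        col.isNull[0] = true;
        col.noNulls = false;
        col.isRepeating = true;
        System.out.println(col.isRepeating && !col.noNulls);  // true
      }
    }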

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
index 9e026f0..fcebb6f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
@@ -124,6 +124,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.*;
 import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
 import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFArgDesc;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
@@ -221,6 +222,7 @@ public class VectorizationContext {
       projectedColumns.add(i);
       projectionColumnMap.put(projectionColumnNames.get(i), i);
     }
+
     int firstOutputColumnIndex = projectedColumns.size();
     this.ocm = new OutputColumnManager(firstOutputColumnIndex);
     this.firstOutputColumnIndex = firstOutputColumnIndex;

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
index 3c12e04..90d1372 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.io.IOPrepareCache;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
@@ -78,6 +79,8 @@ public class VectorizedRowBatchCtx {
   private int[] dataColumnNums;
   private int dataColumnCount;
   private int partitionColumnCount;
+  private int virtualColumnCount;
+  private VirtualColumn[] neededVirtualColumns;
 
   private String[] scratchColumnTypeNames;
 
@@ -88,14 +91,17 @@ public class VectorizedRowBatchCtx {
   }
 
   public VectorizedRowBatchCtx(String[] rowColumnNames, TypeInfo[] rowColumnTypeInfos,
-      int[] dataColumnNums, int partitionColumnCount, String[] scratchColumnTypeNames) {
+      int[] dataColumnNums, int partitionColumnCount, VirtualColumn[] neededVirtualColumns,
+      String[] scratchColumnTypeNames) {
     this.rowColumnNames = rowColumnNames;
     this.rowColumnTypeInfos = rowColumnTypeInfos;
     this.dataColumnNums = dataColumnNums;
     this.partitionColumnCount = partitionColumnCount;
+    this.neededVirtualColumns = neededVirtualColumns;
+    this.virtualColumnCount = neededVirtualColumns.length;
     this.scratchColumnTypeNames = scratchColumnTypeNames;
 
-    dataColumnCount = rowColumnTypeInfos.length - partitionColumnCount;
+    dataColumnCount = rowColumnTypeInfos.length - partitionColumnCount - virtualColumnCount;
   }
 
   public String[] getRowColumnNames() {
@@ -118,6 +124,14 @@ public class VectorizedRowBatchCtx {
     return partitionColumnCount;
   }
 
+  public int getVirtualColumnCount() {
+    return virtualColumnCount;
+  }
+
+  public VirtualColumn[] getNeededVirtualColumns() {
+    return neededVirtualColumns;
+  }
+
   public String[] getScratchColumnTypeNames() {
     return scratchColumnTypeNames;
   }
@@ -138,6 +152,8 @@ public class VectorizedRowBatchCtx {
     rowColumnTypeInfos = VectorizedBatchUtil.typeInfosFromStructObjectInspector(structObjectInspector);
     dataColumnNums = null;
     partitionColumnCount = 0;
+    virtualColumnCount = 0;
+    neededVirtualColumns = new VirtualColumn[0];
     dataColumnCount = rowColumnTypeInfos.length;
 
     // Scratch column information.
@@ -204,13 +220,14 @@ public class VectorizedRowBatchCtx {
    */
   public VectorizedRowBatch createVectorizedRowBatch()
   {
-    final int dataAndPartColumnCount = rowColumnTypeInfos.length;
-    final int totalColumnCount = dataAndPartColumnCount + scratchColumnTypeNames.length;
+    final int nonScratchColumnCount = rowColumnTypeInfos.length;
+    final int totalColumnCount =
+        nonScratchColumnCount + scratchColumnTypeNames.length;
     VectorizedRowBatch result = new VectorizedRowBatch(totalColumnCount);
 
     if (dataColumnNums == null) {
         // All data and partition columns.
-      for (int i = 0; i < dataAndPartColumnCount; i++) {
+      for (int i = 0; i < nonScratchColumnCount; i++) {
         TypeInfo typeInfo = rowColumnTypeInfos[i];
         result.cols[i] = VectorizedBatchUtil.createColumnVector(typeInfo);
       }
@@ -218,24 +235,30 @@ public class VectorizedRowBatchCtx {
       // Create only needed/included columns data columns.
       for (int i = 0; i < dataColumnNums.length; i++) {
         int columnNum = dataColumnNums[i];
-        Preconditions.checkState(columnNum < dataAndPartColumnCount);
+        Preconditions.checkState(columnNum < nonScratchColumnCount);
         TypeInfo typeInfo = rowColumnTypeInfos[columnNum];
         result.cols[columnNum] = VectorizedBatchUtil.createColumnVector(typeInfo);
       }
-      // Always create partition columns.
-      final int endColumnNum = dataColumnCount + partitionColumnCount;
-      for (int partitionColumnNum = dataColumnCount; partitionColumnNum < endColumnNum; partitionColumnNum++) {
+      // Always create partition and virtual columns.
+      final int partitionEndColumnNum = dataColumnCount + partitionColumnCount;
+      for (int partitionColumnNum = dataColumnCount; partitionColumnNum < partitionEndColumnNum; partitionColumnNum++) {
         TypeInfo typeInfo = rowColumnTypeInfos[partitionColumnNum];
         result.cols[partitionColumnNum] = VectorizedBatchUtil.createColumnVector(typeInfo);
       }
+      final int virtualEndColumnNum = partitionEndColumnNum + virtualColumnCount;
+      for (int virtualColumnNum = partitionEndColumnNum; virtualColumnNum < virtualEndColumnNum; virtualColumnNum++) {
+        TypeInfo typeInfo = rowColumnTypeInfos[virtualColumnNum];
+        result.cols[virtualColumnNum] = VectorizedBatchUtil.createColumnVector(typeInfo);
+      }
     }
 
     for (int i = 0; i < scratchColumnTypeNames.length; i++) {
       String typeName = scratchColumnTypeNames[i];
-      result.cols[rowColumnTypeInfos.length + i] =
+      result.cols[nonScratchColumnCount + i] =
           VectorizedBatchUtil.createColumnVector(typeName);
     }
 
+    // UNDONE: Also remember virtualColumnCount...
     result.setPartitionInfo(dataColumnCount, partitionColumnCount);
 
     result.reset();
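
With this change the batch columns are laid out as [data | partition | virtual | scratch]. A standalone sketch of the index arithmetic with illustrative counts (not part of the patch; real values come from the table schema):

    public class ColumnLayoutSketch {
      public static void main(String[] args) {
        int dataColumnCount = 10, partitionColumnCount = 2,
            virtualColumnCount = 1, scratchColumnCount = 3;
        int firstPartitionColumn = dataColumnCount;                        // 10
        int firstVirtualColumn = dataColumnCount + partitionColumnCount;   // 12
        int firstScratchColumn = firstVirtualColumn + virtualColumnCount;  // 13
        int totalColumnCount = firstScratchColumn + scratchColumnCount;    // 16
        System.out.println(firstVirtualColumn + " " + totalColumnCount);
      }
    }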

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java
index 2435bf1..0032305 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/VirtualColumn.java
@@ -20,10 +20,13 @@ package org.apache.hadoop.hive.ql.metadata;
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Map;
 
 import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 
 import org.apache.hadoop.conf.Configuration;
@@ -64,6 +67,17 @@ public enum VirtualColumn {
       ImmutableSet.of(FILENAME.getName(), BLOCKOFFSET.getName(), ROWOFFSET.getName(),
           RAWDATASIZE.getName(), GROUPINGID.getName(), ROWID.getName());
 
+  public static final ImmutableMap<String, VirtualColumn> VIRTUAL_COLUMN_NAME_MAP =
+       new ImmutableMap.Builder<String, VirtualColumn>().putAll(getColumnNameMap()).build();
+
+  private static Map<String, VirtualColumn> getColumnNameMap() {
+    Map<String, VirtualColumn> map = new HashMap<String, VirtualColumn>();
+    for (VirtualColumn virtualColumn : values()) {
+      map.put(virtualColumn.name, virtualColumn);
+    }
+    return map;
+  }
+
   private final String name;
   private final TypeInfo typeInfo;
   private final boolean isHidden;
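
The new map allows direct lookup by internal column name; the Vectorizer hunks below use it in place of the VIRTUAL_COLUMN_NAMES membership test. A standalone usage sketch:

    import org.apache.hadoop.hive.ql.metadata.VirtualColumn;

    public class NameMapSketch {
      public static void main(String[] args) {
        // Resolves to VirtualColumn.ROWID; returns null for ordinary columns.
        VirtualColumn vc = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get("ROW__ID");
        System.out.println(vc == VirtualColumn.ROWID);  // true
      }
    }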

http://git-wip-us.apache.org/repos/asf/hive/blob/f6b61d60/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 8183194..0913f40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -75,6 +75,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorColumnSourceMapping;
 import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorMapJoinOuterFilteredOperator;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
+import org.apache.hadoop.hive.ql.exec.vector.VectorizedBatchUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.HiveVectorAdaptorUsageMode;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext.InConstantType;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizationContextRegion;
@@ -229,6 +230,7 @@ import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hive.common.util.AnnotationUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import com.google.common.collect.ImmutableSet;
 import com.google.common.base.Preconditions;
 
 public class Vectorizer implements PhysicalPlanResolver {
@@ -274,6 +276,10 @@ public class Vectorizer implements PhysicalPlanResolver {
 
   private Set<String> supportedAggregationUdfs = new HashSet<String>();
 
+  // The set of virtual columns that vectorized readers *MAY* support.
+  public static final ImmutableSet<VirtualColumn> vectorizableVirtualColumns =
+      ImmutableSet.of(VirtualColumn.ROWID);
+
   private HiveConf hiveConf;
 
   private boolean useVectorizedInputFileFormat;
@@ -283,6 +289,7 @@ public class Vectorizer implements PhysicalPlanResolver {
   private boolean isPtfVectorizationEnabled;
   private boolean isVectorizationComplexTypesEnabled;
   private boolean isVectorizationGroupByComplexTypesEnabled;
+  private boolean isVectorizedRowIdentifierEnabled;
 
   private boolean isSchemaEvolution;
 
@@ -316,6 +323,9 @@ public class Vectorizer implements PhysicalPlanResolver {
 
   private long vectorizedVertexNum = -1;
 
+  private Set<VirtualColumn> availableVectorizedVirtualColumnSet = null;
+  private Set<VirtualColumn> neededVirtualColumnSet = null;
+
   public Vectorizer() {
 
     /*
@@ -453,6 +463,8 @@ public class Vectorizer implements PhysicalPlanResolver {
     List<Integer> dataColumnNums;
 
     int partitionColumnCount;
+    List<VirtualColumn> availableVirtualColumnList;
+    List<VirtualColumn> neededVirtualColumnList;
     boolean useVectorizedInputFileFormat;
 
     boolean groupByVectorOutput;
@@ -488,6 +500,12 @@ public class Vectorizer implements PhysicalPlanResolver {
     public void setPartitionColumnCount(int partitionColumnCount) {
       this.partitionColumnCount = partitionColumnCount;
     }
+    public void setAvailableVirtualColumnList(List<VirtualColumn> availableVirtualColumnList) {
+      this.availableVirtualColumnList = availableVirtualColumnList;
+    }
+    public void setNeededVirtualColumnList(List<VirtualColumn> neededVirtualColumnList) {
+      this.neededVirtualColumnList = neededVirtualColumnList;
+    }
     public void setScratchTypeNameArray(String[] scratchTypeNameArray) {
       this.scratchTypeNameArray = scratchTypeNameArray;
     }
@@ -522,6 +540,16 @@ public class Vectorizer implements PhysicalPlanResolver {
 
     public void transferToBaseWork(BaseWork baseWork) {
 
+      final int virtualColumnCount;
+      VirtualColumn[] neededVirtualColumns;
+      if (neededVirtualColumnList != null && neededVirtualColumnList.size() > 0) {
+        virtualColumnCount = neededVirtualColumnList.size();
+        neededVirtualColumns = neededVirtualColumnList.toArray(new VirtualColumn[0]);
+      } else {
+        virtualColumnCount = 0;
+        neededVirtualColumns = new VirtualColumn[0];
+      }
+
       String[] allColumnNameArray = allColumnNames.toArray(new String[0]);
       TypeInfo[] allTypeInfoArray = allTypeInfos.toArray(new TypeInfo[0]);
       int[] dataColumnNumsArray;
@@ -537,6 +565,7 @@ public class Vectorizer implements PhysicalPlanResolver {
             allTypeInfoArray,
             dataColumnNumsArray,
             partitionColumnCount,
+            neededVirtualColumns,
             scratchTypeNameArray);
       baseWork.setVectorizedRowBatchCtx(vectorizedRowBatchCtx);
 
@@ -687,20 +716,41 @@ public class Vectorizer implements PhysicalPlanResolver {
     }
 
     private void getTableScanOperatorSchemaInfo(TableScanOperator tableScanOperator,
-        List<String> logicalColumnNameList, List<TypeInfo> logicalTypeInfoList) {
+        List<String> logicalColumnNameList, List<TypeInfo> logicalTypeInfoList,
+        List<VirtualColumn> availableVirtualColumnList) {
 
-      // Add all non-virtual columns to make a vectorization context for
+      // Add all columns to make a vectorization context for
       // the TableScan operator.
       RowSchema rowSchema = tableScanOperator.getSchema();
       for (ColumnInfo c : rowSchema.getSignature()) {
-        // Validation will later exclude vectorization of virtual columns usage (HIVE-5560).
-        if (!isVirtualColumn(c)) {
-          String columnName = c.getInternalName();
-          String typeName = c.getTypeName();
-          TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeName);
 
+        // Validation will later exclude vectorization of virtual column usage if necessary.
+        String columnName = c.getInternalName();
+
+        // Turns out partition columns get marked as virtual in ColumnInfo, so we need to
+        // check the VirtualColumn directly.
+        VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName);
+        if (virtualColumn == null) {
           logicalColumnNameList.add(columnName);
-          logicalTypeInfoList.add(typeInfo);
+          logicalTypeInfoList.add(TypeInfoUtils.getTypeInfoFromTypeString(c.getTypeName()));
+        } else {
+
+          // The planner gives us the subset of virtual columns available for this table scan.
+          //    AND
+          // We only support some virtual columns in vectorization.
+          //
+          // So, create the intersection: the available, vectorizable virtual columns.
+          // Later we remember which virtual columns were *actually used* in the query, so
+          // that only those are included in the Map VectorizedRowBatchCtx that holds the
+          // information for creating the Map VectorizedRowBatch.
+          //
+          if (!vectorizableVirtualColumns.contains(virtualColumn)) {
+            continue;
+          }
+          if (virtualColumn == VirtualColumn.ROWID && !isVectorizedRowIdentifierEnabled) {
+            continue;
+          }
+          availableVirtualColumnList.add(virtualColumn);
         }
       }
     }
@@ -893,14 +943,19 @@ public class Vectorizer implements PhysicalPlanResolver {
       boolean isAcidTable = tableScanOperator.getConf().isAcidTable();
 
       // These names/types are the data columns plus partition columns.
-      final List<String> allColumnNameList = new ArrayList<String>();
-      final List<TypeInfo> allTypeInfoList = new ArrayList<TypeInfo>();
+      final List<String> dataAndPartColumnNameList = new ArrayList<String>();
+      final List<TypeInfo> dataAndPartTypeInfoList = new ArrayList<TypeInfo>();
+
+      final List<VirtualColumn> availableVirtualColumnList = new ArrayList<VirtualColumn>();
 
-      getTableScanOperatorSchemaInfo(tableScanOperator, allColumnNameList, allTypeInfoList);
+      getTableScanOperatorSchemaInfo(
+          tableScanOperator,
+          dataAndPartColumnNameList, dataAndPartTypeInfoList,
+          availableVirtualColumnList);
 
       final List<Integer> dataColumnNums = new ArrayList<Integer>();
 
-      final int allColumnCount = allColumnNameList.size();
+      final int dataAndPartColumnCount = dataAndPartColumnNameList.size();
 
       /*
        * Validate input formats of all the partitions can be vectorized.
@@ -956,17 +1011,17 @@ public class Vectorizer implements PhysicalPlanResolver {
           LinkedHashMap<String, String> partSpec = partDesc.getPartSpec();
           if (partSpec != null && partSpec.size() > 0) {
             partitionColumnCount = partSpec.size();
-            dataColumnCount = allColumnCount - partitionColumnCount;
+            dataColumnCount = dataAndPartColumnCount - partitionColumnCount;
           } else {
             partitionColumnCount = 0;
-            dataColumnCount = allColumnCount;
+            dataColumnCount = dataAndPartColumnCount;
           }
 
-          determineDataColumnNums(tableScanOperator, allColumnNameList, dataColumnCount,
+          determineDataColumnNums(tableScanOperator, dataAndPartColumnNameList, dataColumnCount,
               dataColumnNums);
 
-          tableDataColumnList = allColumnNameList.subList(0, dataColumnCount);
-          tableDataTypeInfoList = allTypeInfoList.subList(0, dataColumnCount);
+          tableDataColumnList = dataAndPartColumnNameList.subList(0, dataColumnCount);
+          tableDataTypeInfoList = dataAndPartTypeInfoList.subList(0, dataColumnCount);
 
           isFirst = false;
         }
@@ -1038,10 +1093,14 @@ public class Vectorizer implements PhysicalPlanResolver {
         vectorPartDesc.setDataTypeInfos(nextDataTypeInfoList);
       }
 
-      vectorTaskColumnInfo.setAllColumnNames(allColumnNameList);
-      vectorTaskColumnInfo.setAllTypeInfos(allTypeInfoList);
+      // For now, we don't know which virtual columns are going to be included.  We'll add them
+      // later...
+      vectorTaskColumnInfo.setAllColumnNames(dataAndPartColumnNameList);
+      vectorTaskColumnInfo.setAllTypeInfos(dataAndPartTypeInfoList);
+
       vectorTaskColumnInfo.setDataColumnNums(dataColumnNums);
       vectorTaskColumnInfo.setPartitionColumnCount(partitionColumnCount);
+      vectorTaskColumnInfo.setAvailableVirtualColumnList(availableVirtualColumnList);
       vectorTaskColumnInfo.setUseVectorizedInputFileFormat(useVectorizedInputFileFormat);
 
       // Always set these so EXPLAIN can see.
@@ -1082,6 +1141,14 @@ public class Vectorizer implements PhysicalPlanResolver {
         return false;
       }
 
+      // Set the global member indicating which virtual columns may be used by
+      // the Map vertex.
+      availableVectorizedVirtualColumnSet = new HashSet<VirtualColumn>();
+      availableVectorizedVirtualColumnSet.addAll(vectorTaskColumnInfo.availableVirtualColumnList);
+
+      // And use a set to remember which virtual columns were actually referenced.
+      neededVirtualColumnSet = new HashSet<VirtualColumn>();
+
       // Now we are enabled and any issues found from here on out are considered
       // not vectorized issues.
       mapWork.setVectorizationEnabled(true);
@@ -1104,6 +1171,21 @@ public class Vectorizer implements PhysicalPlanResolver {
           }
         }
       }
+
+      List<VirtualColumn> neededVirtualColumnList = new ArrayList<VirtualColumn>();
+      if (!neededVirtualColumnSet.isEmpty()) {
+
+        // Create the needed list in the same order as the available list.
+        for (VirtualColumn virtualColumn : vectorTaskColumnInfo.availableVirtualColumnList) {
+          if (neededVirtualColumnSet.contains(virtualColumn)) {
+            neededVirtualColumnList.add(virtualColumn);
+            vectorTaskColumnInfo.allColumnNames.add(virtualColumn.getName());
+            vectorTaskColumnInfo.allTypeInfos.add(virtualColumn.getTypeInfo());
+          }
+        }
+      }
+
+      vectorTaskColumnInfo.setNeededVirtualColumnList(neededVirtualColumnList);
       vectorTaskColumnInfo.setNonVectorizedOps(vnp.getNonVectorizedOps());
       return true;
     }
@@ -1737,6 +1819,10 @@ public class Vectorizer implements PhysicalPlanResolver {
         HiveConf.getBoolVar(hiveConf,
             HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_COMPLEX_TYPES_ENABLED);
 
+    isVectorizedRowIdentifierEnabled =
+        HiveConf.getBoolVar(hiveConf,
+            HiveConf.ConfVars.HIVE_VECTORIZATION_ROW_IDENTIFIER_ENABLED);
+
     isSchemaEvolution =
         HiveConf.getBoolVar(hiveConf,
             HiveConf.ConfVars.HIVE_SCHEMA_EVOLUTION);
@@ -2328,10 +2414,24 @@ public class Vectorizer implements PhysicalPlanResolver {
       VectorExpressionDescriptor.Mode mode, boolean allowComplex) {
     if (desc instanceof ExprNodeColumnDesc) {
       ExprNodeColumnDesc c = (ExprNodeColumnDesc) desc;
-      // Currently, we do not support vectorized virtual columns (see HIVE-5570).
-      if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(c.getColumn())) {
-        setExpressionIssue(expressionTitle, "Virtual columns not supported (" + c.getColumn() + ")");
-        return false;
+      String columnName = c.getColumn();
+
+      if (availableVectorizedVirtualColumnSet != null) {
+
+        // For Map, check for virtual columns.
+        VirtualColumn virtualColumn = VirtualColumn.VIRTUAL_COLUMN_NAME_MAP.get(columnName);
+        if (virtualColumn != null) {
+
+          // We support some virtual columns in vectorization for this table scan.
+
+          if (!availableVectorizedVirtualColumnSet.contains(virtualColumn)) {
+            setExpressionIssue(expressionTitle, "Virtual column " + columnName + " is not supported");
+            return false;
+          }
+
+          // Remember we used this one in the query.
+          neededVirtualColumnSet.add(virtualColumn);
+        }
       }
     }
     String typeName = desc.getTypeInfo().getTypeName();
@@ -4180,28 +4280,20 @@ public class Vectorizer implements PhysicalPlanResolver {
     return vectorOp;
   }
 
-  private boolean isVirtualColumn(ColumnInfo column) {
-
-    // Not using method column.getIsVirtualCol() because partitioning columns are also
-    // treated as virtual columns in ColumnInfo.
-    if (VirtualColumn.VIRTUAL_COLUMN_NAMES.contains(column.getInternalName())) {
-        return true;
-    }
-    return false;
-  }
-
   public void debugDisplayAllMaps(BaseWork work) {
 
     VectorizedRowBatchCtx vectorizedRowBatchCtx = work.getVectorizedRowBatchCtx();
 
     String[] allColumnNames = vectorizedRowBatchCtx.getRowColumnNames();
-    Object columnTypeInfos = vectorizedRowBatchCtx.getRowColumnTypeInfos();
+    TypeInfo[] columnTypeInfos = vectorizedRowBatchCtx.getRowColumnTypeInfos();
     int partitionColumnCount = vectorizedRowBatchCtx.getPartitionColumnCount();
+    int virtualColumnCount = vectorizedRowBatchCtx.getVirtualColumnCount();
    String[] scratchColumnTypeNames = vectorizedRowBatchCtx.getScratchColumnTypeNames();
 
-    LOG.debug("debugDisplayAllMaps allColumnNames " + Arrays.toString(allColumnNames));
-    LOG.debug("debugDisplayAllMaps columnTypeInfos " + Arrays.deepToString((Object[]) columnTypeInfos));
+    LOG.debug("debugDisplayAllMaps rowColumnNames " + Arrays.toString(allColumnNames));
+    LOG.debug("debugDisplayAllMaps rowColumnTypeInfos " + Arrays.toString(columnTypeInfos));
     LOG.debug("debugDisplayAllMaps partitionColumnCount " + partitionColumnCount);
+    LOG.debug("debugDisplayAllMaps virtualColumnCount " + virtualColumnCount);
     LOG.debug("debugDisplayAllMaps scratchColumnTypeNames " + Arrays.toString(scratchColumnTypeNames));
   }
 }
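
The validation flow added above amounts to: non-virtual columns always pass, virtual columns pass only when available, and any referenced virtual column is recorded as needed. A standalone sketch with strings standing in for VirtualColumn values (not part of the patch):

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class VirtualColumnValidationSketch {
      static final Map<String, String> NAME_MAP =
          Collections.singletonMap("ROW__ID", "ROWID");

      static boolean validateColumn(String columnName,
          Set<String> available, Set<String> needed) {
        String virtual = NAME_MAP.get(columnName);
        if (virtual == null) {
          return true;            // ordinary data or partition column
        }
        if (!available.contains(virtual)) {
          return false;           // virtual column not vectorizable for this scan
        }
        needed.add(virtual);      // remember it was actually referenced
        return true;
      }

      public static void main(String[] args) {
        Set<String> available = new HashSet<>(Collections.singleton("ROWID"));
        Set<String> needed = new HashSet<>();
        System.out.println(validateColumn("ROW__ID", available, needed));  // true
        System.out.println(needed);                                        // [ROWID]
      }
    }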