Posted to commits@hive.apache.org by tc...@apache.org on 2019/03/14 05:17:21 UTC

[hive] branch master updated: HIVE-21368: Vectorization: Unnecessary Decimal64 -> HiveDecimal conversion (Teddy Choi, reviewed by Gopal V)

This is an automated email from the ASF dual-hosted git repository.

tchoi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new c5219a8  HIVE-21368: Vectorization: Unnecessary Decimal64 -> HiveDecimal conversion (Teddy Choi, reviewed by Gopal V)
c5219a8 is described below

commit c5219a8490758e042604bc659b7a68e4cc94eef9
Author: Teddy Choi <tc...@hortonworks.com>
AuthorDate: Thu Mar 14 14:16:25 2019 +0900

    HIVE-21368: Vectorization: Unnecessary Decimal64 -> HiveDecimal conversion (Teddy Choi, reviewed by Gopal V)
---
 .../test/resources/testconfiguration.properties    |   1 +
 .../hadoop/hive/ql/exec/vector/VectorCopyRow.java  |  86 +++++++-
 .../hive/ql/optimizer/physical/Vectorizer.java     |   2 +-
 .../queries/clientpositive/vector_decimal_join.q   |   7 +
 .../llap/vector_binary_join_groupby.q.out          |   7 +-
 ...q.out => vector_binary_join_groupby.q.out.orig} |   0
 .../clientpositive/llap/vector_decimal_join.q.out  | 221 +++++++++++++++++++++
 .../llap/vector_decimal_mapjoin.q.out              |   8 +-
 .../clientpositive/llap/vectorized_mapjoin3.q.out  |  27 ++-
 ql/src/test/results/clientpositive/masking_1.q.out |  14 +-
 .../spark/vector_decimal_mapjoin.q.out             |   8 +-
 .../vector_binary_join_groupby.q.out               |   2 +-
 ...q.out => vector_binary_join_groupby.q.out.orig} |   0
 .../clientpositive/vector_decimal_join.q.out       | 162 +++++++++++++++
 .../clientpositive/vector_decimal_mapjoin.q.out    |   8 +-
 15 files changed, 511 insertions(+), 42 deletions(-)
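
What the patch does, in brief: DecimalCopyRow in VectorCopyRow previously cast both
sides of a copy to DecimalColumnVector, which forced DECIMAL_64 columns to be
up-converted before a map join could copy big-table rows. The change below dispatches
on the runtime vector types so that all four Decimal/Decimal64 source/target
combinations are handled directly, and Vectorizer stops up-converting big-table value
expressions.

The cost being avoided is a representation change. A minimal sketch, assuming only the
stock storage-api classes (the Decimal64Demo class and its literal values are
illustrative, not part of the patch): a Decimal64ColumnVector holds each decimal as an
unscaled long plus a per-column scale, so a Decimal64-to-Decimal64 copy is a plain
long assignment, while crossing into a DecimalColumnVector pays for a conversion.

    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class Decimal64Demo {
      public static void main(String[] args) {
        // decimal(7,2) value 123.45 in DECIMAL_64 form: unscaled long + scale.
        long unscaled = 12345L;
        int scale = 2;

        // Staying in Decimal64 is just a long copy; the scale lives in the
        // column metadata, not in each cell.
        long copied = unscaled;

        // Crossing into a DecimalColumnVector requires materializing a
        // HiveDecimal, via the same setFromLongAndScale() call the patch
        // uses in copyDecimal64ToDecimal().
        HiveDecimalWritable writable = new HiveDecimalWritable();
        writable.setFromLongAndScale(unscaled, scale);
        System.out.println(copied + " -> " + writable.getHiveDecimal());  // 12345 -> 123.45
      }
    }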

diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 4197fb8..20cda6a 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -268,6 +268,7 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   vector_decimal_aggregate.q,\
   vector_decimal_cast.q,\
   vector_decimal_expressions.q,\
+  vector_decimal_join.q,\
   vector_decimal_mapjoin.q,\
   vector_decimal_math_funcs.q,\
   vector_decimal_precision.q,\
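
The testconfiguration.properties hunk above registers the new vector_decimal_join.q
test in minillaplocal.shared.query.files, so it runs under the MiniLlapLocal driver
(llap/vector_decimal_join.q.out below) in addition to the default CLI driver
(vector_decimal_join.q.out).
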
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
index 0cf8491..ec8fe52 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorCopyRow.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
@@ -176,9 +177,48 @@ public class VectorCopyRow {
 
     @Override
     void copy(VectorizedRowBatch inBatch, int inBatchIndex, VectorizedRowBatch outBatch, int outBatchIndex) {
-      DecimalColumnVector inColVector = (DecimalColumnVector) inBatch.cols[inColumnIndex];
-      DecimalColumnVector outColVector = (DecimalColumnVector) outBatch.cols[outColumnIndex];
+      ColumnVector inColVector = inBatch.cols[inColumnIndex];
+      ColumnVector outColVector = outBatch.cols[outColumnIndex];
+      if (inColVector instanceof DecimalColumnVector) {
+        if (outColVector instanceof DecimalColumnVector) {
+          copyDecimalToDecimal((DecimalColumnVector) inColVector, inBatchIndex,
+              (DecimalColumnVector) outColVector, outBatchIndex);
+        } else {
+          copyDecimalToDecimal64((DecimalColumnVector) inColVector, inBatchIndex,
+              (Decimal64ColumnVector) outColVector, outBatchIndex);
+        }
+      } else {
+        if (outColVector instanceof DecimalColumnVector) {
+          copyDecimal64ToDecimal((Decimal64ColumnVector) inColVector, inBatchIndex,
+              (DecimalColumnVector) outColVector, outBatchIndex);
+        } else {
+          copyDecimal64ToDecimal64((Decimal64ColumnVector) inColVector, inBatchIndex,
+              (Decimal64ColumnVector) outColVector, outBatchIndex);
+        }
+      }
+    }
+    
+    private void copyDecimalToDecimal(DecimalColumnVector inColVector, int inBatchIndex,
+        DecimalColumnVector outColVector, int outBatchIndex) {
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.isNull[outBatchIndex] = false;
+          outColVector.set(outBatchIndex, inColVector.vector[0]);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.isNull[outBatchIndex] = false;
+          outColVector.set(outBatchIndex, inColVector.vector[inBatchIndex]);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
 
+    private void copyDecimalToDecimal64(DecimalColumnVector inColVector, int inBatchIndex,
+        Decimal64ColumnVector outColVector, int outBatchIndex) {
       if (inColVector.isRepeating) {
         if (inColVector.noNulls || !inColVector.isNull[0]) {
           outColVector.isNull[outBatchIndex] = false;
@@ -195,6 +235,48 @@ public class VectorCopyRow {
         }
       }
     }
+
+    private void copyDecimal64ToDecimal(Decimal64ColumnVector inColVector, int inBatchIndex,
+        DecimalColumnVector outColVector, int outBatchIndex) {
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.isNull[outBatchIndex] = false;
+          HiveDecimalWritable scratchWritable = inColVector.getScratchWritable();
+          scratchWritable.setFromLongAndScale(inColVector.vector[0], inColVector.scale);
+          outColVector.set(outBatchIndex, scratchWritable);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.isNull[outBatchIndex] = false;
+          HiveDecimalWritable scratchWritable = inColVector.getScratchWritable();
+          scratchWritable.setFromLongAndScale(inColVector.vector[inBatchIndex], inColVector.scale);
+          outColVector.set(outBatchIndex, scratchWritable);
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
+    
+    private void copyDecimal64ToDecimal64(Decimal64ColumnVector inColVector, int inBatchIndex,
+        Decimal64ColumnVector outColVector, int outBatchIndex) {
+      if (inColVector.isRepeating) {
+        if (inColVector.noNulls || !inColVector.isNull[0]) {
+          outColVector.isNull[outBatchIndex] = false;
+          outColVector.vector[outBatchIndex] = inColVector.vector[0];
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      } else {
+        if (inColVector.noNulls || !inColVector.isNull[inBatchIndex]) {
+          outColVector.isNull[outBatchIndex] = false;
+          outColVector.vector[outBatchIndex] = inColVector.vector[inBatchIndex];
+        } else {
+          VectorizedBatchUtil.setNullColIsNullValue(outColVector, outBatchIndex);
+        }
+      }
+    }
   }
 
   private class TimestampCopyRow extends CopyRow {
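
Two details in the new copy routines are worth calling out. First, every routine
branches on isRepeating and noNulls before touching a cell: under the standard
VectorizedRowBatch contract, a repeating vector stores its single value (and its null
flag) at index 0. Second, the Decimal64-to-Decimal path reuses the column's scratch
HiveDecimalWritable via getScratchWritable() rather than allocating one per row. A
minimal sketch of the read side under that contract (readDecimal64Cell is an
illustrative helper, not part of the patch):

    import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;

    public final class VectorCellReader {
      // Returns the unscaled long for one row, honoring the repeating case.
      static long readDecimal64Cell(Decimal64ColumnVector v, int rowIdx) {
        int idx = v.isRepeating ? 0 : rowIdx;  // repeating => slot 0 holds the value
        if (!v.noNulls && v.isNull[idx]) {
          throw new IllegalStateException("null cell; callers test isNull first");
        }
        return v.vector[idx];
      }
    }
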
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
index 1fe0a79..c623adf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
@@ -3618,7 +3618,7 @@ public class Vectorizer implements PhysicalPlanResolver {
 
     // For now, we don't support joins on or using DECIMAL_64.
     VectorExpression[] allBigTableValueExpressions =
-        vContext.getVectorExpressionsUpConvertDecimal64(bigTableExprs);
+        vContext.getVectorExpressions(bigTableExprs);
 
     boolean isFastHashTableEnabled =
         HiveConf.getBoolVar(hiveConf,
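
Note the Vectorizer change is limited to the value side of the map join:
getVectorExpressions() leaves DECIMAL_64 big-table value columns in their long form,
while (as the updated golden files below show) key expressions still go through
ConvertDecimal64ToDecimal. The surrounding "we don't support joins on or using
DECIMAL_64" comment now effectively applies to keys only.
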
diff --git a/ql/src/test/queries/clientpositive/vector_decimal_join.q b/ql/src/test/queries/clientpositive/vector_decimal_join.q
new file mode 100644
index 0000000..03becd1
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/vector_decimal_join.q
@@ -0,0 +1,7 @@
+set hive.auto.convert.join=true;
+set hive.vectorized.execution.enabled=true;
+
+create temporary table foo(x int , y decimal(7,2));
+create temporary table bar(x int , y decimal(7,2));
+set hive.explain.user=false;
+explain vectorization detail select sum(foo.y) from foo, bar where foo.x = bar.x;
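
The qtest drives exactly the scenario the patch targets: a broadcast map join whose
big-table value is a decimal(7,2) read as DECIMAL_64, aggregated with sum(). In the
LLAP golden file below, the value column stays in DECIMAL_64 form through the join
(projectedOutput: 1:decimal(7,2)) and the map-side aggregation runs as
VectorUDAFSumDecimal64(col 1:decimal(7,2)/DECIMAL_64), with no
ConvertDecimal64ToDecimal on the value path.
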
diff --git a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
index 5881607..2095c59 100644
--- a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
@@ -156,7 +156,6 @@ STAGE PLANS:
                           0 _col10 (type: binary)
                           1 _col10 (type: binary)
                         Map Join Vectorization:
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2)
                             className: VectorMapJoinInnerStringOperator
                             native: true
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
@@ -171,13 +170,13 @@ STAGE PLANS:
                           Select Vectorization:
                               className: VectorSelectOperator
                               native: true
-                              projectedOutputColumnNums: [23]
-                              selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18,_col19,_col20,_col21)) -> 23:int
+                              projectedOutputColumnNums: [22]
+                              selectExpressions: VectorUDFAdaptor(hash(_col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16,_col17,_col18,_col19,_col20,_col21)) -> 22:int
                           Statistics: Num rows: 10000 Data size: 6819968 Basic stats: COMPLETE Column stats: COMPLETE
                           Group By Operator
                             aggregations: sum(_col0)
                             Group By Vectorization:
-                                aggregators: VectorUDAFSumLong(col 23:int) -> bigint
+                                aggregators: VectorUDAFSumLong(col 22:int) -> bigint
                                 className: VectorGroupByOperator
                                 groupByMode: HASH
                                 native: false
diff --git a/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out.orig
similarity index 100%
copy from ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out
copy to ql/src/test/results/clientpositive/llap/vector_binary_join_groupby.q.out.orig
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out
new file mode 100644
index 0000000..bf22219
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_join.q.out
@@ -0,0 +1,221 @@
+PREHOOK: query: create temporary table foo(x int , y decimal(7,2))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@foo
+POSTHOOK: query: create temporary table foo(x int , y decimal(7,2))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@foo
+PREHOOK: query: create temporary table bar(x int , y decimal(7,2))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bar
+POSTHOOK: query: create temporary table bar(x int , y decimal(7,2))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bar
+PREHOOK: query: explain vectorization detail select sum(foo.y) from foo, bar where foo.x = bar.x
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bar
+PREHOOK: Input: default@foo
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select sum(foo.y) from foo, bar where foo.x = bar.x
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bar
+POSTHOOK: Input: default@foo
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Map 1 <- Map 3 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: foo
+                  filterExpr: x is not null (type: boolean)
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:x:int, 1:y:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 0:int)
+                    predicate: x is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: x (type: int), y (type: decimal(7,2))
+                      outputColumnNames: _col0, _col1
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [0, 1]
+                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
+                        keys:
+                          0 _col0 (type: int)
+                          1 _col0 (type: int)
+                        Map Join Vectorization:
+                            bigTableKeyColumns: 0:int
+                            bigTableRetainColumnNums: [1]
+                            bigTableValueColumns: 1:decimal(7,2)
+                            className: VectorMapJoinInnerBigOnlyLongOperator
+                            native: true
+                            nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                            nonOuterSmallTableKeyMapping: []
+                            projectedOutput: 1:decimal(7,2)
+                            hashTableImplementationType: OPTIMIZED
+                        outputColumnNames: _col1
+                        input vertices:
+                          1 Map 3
+                        Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                        Group By Operator
+                          aggregations: sum(_col1)
+                          Group By Vectorization:
+                              aggregators: VectorUDAFSumDecimal64(col 1:decimal(7,2)/DECIMAL_64) -> decimal(17,2)/DECIMAL_64
+                              className: VectorGroupByOperator
+                              groupByMode: HASH
+                              native: false
+                              vectorProcessingMode: HASH
+                              projectedOutputColumnNums: [0]
+                          mode: hash
+                          outputColumnNames: _col0
+                          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                          Reduce Output Operator
+                            sort order: 
+                            Reduce Sink Vectorization:
+                                className: VectorReduceSinkEmptyKeyOperator
+                                native: true
+                                nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                                valueColumns: 0:decimal(17,2)
+                            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                            value expressions: _col0 (type: decimal(17,2))
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0, 1]
+                    dataColumns: x:int, y:decimal(7,2)/DECIMAL_64
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: bar
+                  filterExpr: x is not null (type: boolean)
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                  TableScan Vectorization:
+                      native: true
+                      vectorizationSchemaColumns: [0:x:int, 1:y:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+                  Filter Operator
+                    Filter Vectorization:
+                        className: VectorFilterOperator
+                        native: true
+                        predicateExpression: SelectColumnIsNotNull(col 0:int)
+                    predicate: x is not null (type: boolean)
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: x (type: int)
+                      outputColumnNames: _col0
+                      Select Vectorization:
+                          className: VectorSelectOperator
+                          native: true
+                          projectedOutputColumnNums: [0]
+                      Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: int)
+                        Reduce Sink Vectorization:
+                            className: VectorReduceSinkLongOperator
+                            keyColumns: 0:int
+                            native: true
+                            nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                        Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+            Execution mode: vectorized, llap
+            LLAP IO: no inputs
+            Map Vectorization:
+                enabled: true
+                enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+                inputFormatFeatureSupport: [DECIMAL_64]
+                featureSupportInUse: [DECIMAL_64]
+                inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+                allNative: true
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 2
+                    includeColumns: [0]
+                    dataColumns: x:int, y:decimal(7,2)/DECIMAL_64
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Vectorization:
+                enabled: true
+                enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true
+                reduceColumnNullOrder: 
+                reduceColumnSortOrder: 
+                allNative: false
+                usesVectorUDFAdaptor: false
+                vectorized: true
+                rowBatchContext:
+                    dataColumnCount: 1
+                    dataColumns: VALUE._col0:decimal(17,2)
+                    partitionColumnCount: 0
+                    scratchColumnTypeNames: []
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                Group By Vectorization:
+                    aggregators: VectorUDAFSumDecimal(col 0:decimal(17,2)) -> decimal(17,2)
+                    className: VectorGroupByOperator
+                    groupByMode: MERGEPARTIAL
+                    native: false
+                    vectorProcessingMode: GLOBAL
+                    projectedOutputColumnNums: [0]
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  File Sink Vectorization:
+                      className: VectorFileSinkOperator
+                      native: false
+                  Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
diff --git a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out
index 4236887..f07f2b0 100644
--- a/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_decimal_mapjoin.q.out
@@ -776,7 +776,7 @@ STAGE PLANS:
                           1 _col0 (type: decimal(16,2))
                         Map Join Vectorization:
                             bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2)
+                            bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64
                             className: VectorMapJoinOperator
                             native: false
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -812,7 +812,7 @@ STAGE PLANS:
                     includeColumns: [0]
                     dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(16,2), decimal(16,2)]
+                    scratchColumnTypeNames: [decimal(16,2)]
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -1058,7 +1058,7 @@ STAGE PLANS:
                           1 _col0 (type: decimal(16,2))
                         Map Join Vectorization:
                             bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2), ConvertDecimal64ToDecimal(col 1:decimal(14,2)/DECIMAL_64) -> 5:decimal(14,2)
+                            bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64
                             className: VectorMapJoinOperator
                             native: false
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -1094,7 +1094,7 @@ STAGE PLANS:
                     includeColumns: [0, 1]
                     dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(16,2), decimal(16,2), decimal(14,2), decimal(14,0)]
+                    scratchColumnTypeNames: [decimal(16,2), decimal(14,0)]
         Map 2 
             Map Operator Tree:
                 TableScan
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out
index 4013b36..6d57642 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_mapjoin3.q.out
@@ -153,14 +153,13 @@ STAGE PLANS:
                           1 _col0 (type: int)
                         Map Join Vectorization:
                             bigTableKeyColumns: 1:int
-                            bigTableRetainColumnNums: [3]
-                            bigTableValueColumns: 3:decimal(8,1)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1)
+                            bigTableRetainColumnNums: [0]
+                            bigTableValueColumns: 0:decimal(8,1)
                             className: VectorMapJoinInnerBigOnlyLongOperator
                             native: true
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
                             nonOuterSmallTableKeyMapping: []
-                            projectedOutput: 3:decimal(8,1)
+                            projectedOutput: 0:decimal(8,1)
                             hashTableImplementationType: OPTIMIZED
                         outputColumnNames: _col0
                         input vertices:
@@ -192,7 +191,7 @@ STAGE PLANS:
                     includeColumns: [0, 1]
                     dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(8,1)]
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -364,14 +363,13 @@ STAGE PLANS:
                           1 _col0 (type: int)
                         Map Join Vectorization:
                             bigTableKeyColumns: 1:int
-                            bigTableRetainColumnNums: [3]
-                            bigTableValueColumns: 3:decimal(8,1)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1)
+                            bigTableRetainColumnNums: [0]
+                            bigTableValueColumns: 0:decimal(8,1)
                             className: VectorMapJoinInnerBigOnlyLongOperator
                             native: true
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
                             nonOuterSmallTableKeyMapping: []
-                            projectedOutput: 3:decimal(8,1)
+                            projectedOutput: 0:decimal(8,1)
                             hashTableImplementationType: OPTIMIZED
                         outputColumnNames: _col0
                         input vertices:
@@ -403,7 +401,7 @@ STAGE PLANS:
                     includeColumns: [0, 1]
                     dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(8,1)]
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
@@ -575,14 +573,13 @@ STAGE PLANS:
                           1 _col0 (type: int)
                         Map Join Vectorization:
                             bigTableKeyColumns: 1:int
-                            bigTableRetainColumnNums: [3]
-                            bigTableValueColumns: 3:decimal(8,1)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(8,1)/DECIMAL_64) -> 3:decimal(8,1)
+                            bigTableRetainColumnNums: [0]
+                            bigTableValueColumns: 0:decimal(8,1)
                             className: VectorMapJoinInnerBigOnlyLongOperator
                             native: true
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine tez IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
                             nonOuterSmallTableKeyMapping: []
-                            projectedOutput: 3:decimal(8,1)
+                            projectedOutput: 0:decimal(8,1)
                             hashTableImplementationType: OPTIMIZED
                         outputColumnNames: _col0
                         input vertices:
@@ -614,7 +611,7 @@ STAGE PLANS:
                     includeColumns: [0, 1]
                     dataColumns: decimal0801_col:decimal(8,1)/DECIMAL_64, int_col_1:int
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(8,1)]
+                    scratchColumnTypeNames: []
         Map 2 
             Map Operator Tree:
                 TableScan
diff --git a/ql/src/test/results/clientpositive/masking_1.q.out b/ql/src/test/results/clientpositive/masking_1.q.out
index 140a56f..d915a47 100644
--- a/ql/src/test/results/clientpositive/masking_1.q.out
+++ b/ql/src/test/results/clientpositive/masking_1.q.out
@@ -271,19 +271,19 @@ STAGE PLANS:
           TableScan
             alias: srcpart
             filterExpr: key is not null (type: boolean)
-            Statistics: Num rows: 2000 Data size: 1092000 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 2000 Data size: 1092000 Basic stats: COMPLETE Column stats: COMPLETE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 2000 Data size: 1092000 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 2000 Data size: 1092000 Basic stats: COMPLETE Column stats: COMPLETE
               Select Operator
                 expressions: key (type: string), value (type: string), ds (type: string), hr (type: string), UDFToDouble(key) (type: double)
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 2000 Data size: 1108000 Basic stats: COMPLETE Column stats: PARTIAL
+                Statistics: Num rows: 2000 Data size: 1108000 Basic stats: COMPLETE Column stats: COMPLETE
                 Reduce Output Operator
                   key expressions: _col4 (type: double)
                   sort order: +
                   Map-reduce partition columns: _col4 (type: double)
-                  Statistics: Num rows: 2000 Data size: 1108000 Basic stats: COMPLETE Column stats: PARTIAL
+                  Statistics: Num rows: 2000 Data size: 1108000 Basic stats: COMPLETE Column stats: COMPLETE
                   value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
       Reduce Operator Tree:
         Join Operator
@@ -293,14 +293,14 @@ STAGE PLANS:
             0 _col2 (type: double)
             1 _col4 (type: double)
           outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col6
-          Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: PARTIAL
+          Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: COMPLETE
           Select Operator
             expressions: _col0 (type: int), _col1 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string)
             outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-            Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: PARTIAL
+            Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: COMPLETE
             File Output Operator
               compressed: false
-              Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: PARTIAL
+              Statistics: Num rows: 525 Data size: 385350 Basic stats: COMPLETE Column stats: COMPLETE
               table:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
index 9bf08fe..f1812fe 100644
--- a/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_decimal_mapjoin.q.out
@@ -830,7 +830,7 @@ STAGE PLANS:
                           1 _col0 (type: decimal(16,2))
                         Map Join Vectorization:
                             bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2)
+                            bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64
                             className: VectorMapJoinOperator
                             native: false
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -865,7 +865,7 @@ STAGE PLANS:
                     includeColumns: [0]
                     dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(16,2), decimal(16,2)]
+                    scratchColumnTypeNames: [decimal(16,2)]
             Local Work:
               Map Reduce Local Work
 
@@ -1114,7 +1114,7 @@ STAGE PLANS:
                           1 _col0 (type: decimal(16,2))
                         Map Join Vectorization:
                             bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                            bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2), ConvertDecimal64ToDecimal(col 1:decimal(14,2)/DECIMAL_64) -> 5:decimal(14,2)
+                            bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64
                             className: VectorMapJoinOperator
                             native: false
                             nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, hive.execution.engine spark IN [tez, spark] IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -1149,7 +1149,7 @@ STAGE PLANS:
                     includeColumns: [0, 1]
                     dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
                     partitionColumnCount: 0
-                    scratchColumnTypeNames: [decimal(16,2), decimal(16,2), decimal(14,2), decimal(14,0)]
+                    scratchColumnTypeNames: [decimal(16,2), decimal(14,0)]
             Local Work:
               Map Reduce Local Work
 
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
index 00200f9..50bad92 100644
--- a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
+++ b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
@@ -175,7 +175,7 @@ STAGE PLANS:
                     1 _col10 (type: binary)
                   Map Join Vectorization:
                       bigTableKeyExpressions: col 10:binary
-                      bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, ConvertDecimal64ToDecimal(col 9:decimal(4,2)/DECIMAL_64) -> 12:decimal(4,2), col 10:binary
+                      bigTableValueExpressions: col 0:tinyint, col 1:smallint, col 2:int, col 3:bigint, col 4:float, col 5:double, col 6:boolean, col 7:string, col 8:timestamp, col 9:decimal(4,2)/DECIMAL_64, col 10:binary
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
diff --git a/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out b/ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out.orig
similarity index 100%
copy from ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out
copy to ql/src/test/results/clientpositive/vector_binary_join_groupby.q.out.orig
diff --git a/ql/src/test/results/clientpositive/vector_decimal_join.q.out b/ql/src/test/results/clientpositive/vector_decimal_join.q.out
new file mode 100644
index 0000000..ad6fce3
--- /dev/null
+++ b/ql/src/test/results/clientpositive/vector_decimal_join.q.out
@@ -0,0 +1,162 @@
+PREHOOK: query: create temporary table foo(x int , y decimal(7,2))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@foo
+POSTHOOK: query: create temporary table foo(x int , y decimal(7,2))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@foo
+PREHOOK: query: create temporary table bar(x int , y decimal(7,2))
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bar
+POSTHOOK: query: create temporary table bar(x int , y decimal(7,2))
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bar
+PREHOOK: query: explain vectorization detail select sum(foo.y) from foo, bar where foo.x = bar.x
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bar
+PREHOOK: Input: default@foo
+#### A masked pattern was here ####
+POSTHOOK: query: explain vectorization detail select sum(foo.y) from foo, bar where foo.x = bar.x
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bar
+POSTHOOK: Input: default@foo
+#### A masked pattern was here ####
+PLAN VECTORIZATION:
+  enabled: true
+  enabledConditionsMet: [hive.vectorized.execution.enabled IS true]
+
+STAGE DEPENDENCIES:
+  Stage-5 is a root stage
+  Stage-2 depends on stages: Stage-5
+  Stage-0 depends on stages: Stage-2
+
+STAGE PLANS:
+  Stage: Stage-5
+    Map Reduce Local Work
+      Alias -> Map Local Tables:
+        $hdt$_0:foo 
+          Fetch Operator
+            limit: -1
+      Alias -> Map Local Operator Tree:
+        $hdt$_0:foo 
+          TableScan
+            alias: foo
+            filterExpr: x is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: x is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: x (type: int), y (type: decimal(7,2))
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
+                HashTable Sink Operator
+                  keys:
+                    0 _col0 (type: int)
+                    1 _col0 (type: int)
+
+  Stage: Stage-2
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: bar
+            filterExpr: x is not null (type: boolean)
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+            TableScan Vectorization:
+                native: true
+                vectorizationSchemaColumns: [0:x:int, 1:y:decimal(7,2)/DECIMAL_64, 2:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
+            Filter Operator
+              Filter Vectorization:
+                  className: VectorFilterOperator
+                  native: true
+                  predicateExpression: SelectColumnIsNotNull(col 0:int)
+              predicate: x is not null (type: boolean)
+              Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: x (type: int)
+                outputColumnNames: _col0
+                Select Vectorization:
+                    className: VectorSelectOperator
+                    native: true
+                    projectedOutputColumnNums: [0]
+                Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+                Map Join Operator
+                  condition map:
+                       Inner Join 0 to 1
+                  keys:
+                    0 _col0 (type: int)
+                    1 _col0 (type: int)
+                  Map Join Vectorization:
+                      bigTableKeyExpressions: col 0:int
+                      className: VectorMapJoinOperator
+                      native: false
+                      nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true, Optimized Table and Supports Key Types IS true
+                      nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 1 Data size: 127 Basic stats: COMPLETE Column stats: NONE
+                  Group By Operator
+                    aggregations: sum(_col1)
+                    Group By Vectorization:
+                        aggregators: VectorUDAFSumDecimal(col 0:decimal(7,2)) -> decimal(17,2)
+                        className: VectorGroupByOperator
+                        groupByMode: HASH
+                        native: false
+                        vectorProcessingMode: HASH
+                        projectedOutputColumnNums: [0]
+                    mode: hash
+                    outputColumnNames: _col0
+                    Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      sort order: 
+                      Reduce Sink Vectorization:
+                          className: VectorReduceSinkOperator
+                          native: false
+                          nativeConditionsMet: hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
+                          nativeConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+                      Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: _col0 (type: decimal(17,2))
+      Execution mode: vectorized
+      Map Vectorization:
+          enabled: true
+          enabledConditionsMet: hive.vectorized.use.vector.serde.deserialize IS true
+          inputFormatFeatureSupport: [DECIMAL_64]
+          featureSupportInUse: [DECIMAL_64]
+          inputFileFormats: org.apache.hadoop.mapred.TextInputFormat
+          allNative: false
+          usesVectorUDFAdaptor: false
+          vectorized: true
+          rowBatchContext:
+              dataColumnCount: 2
+              includeColumns: [0]
+              dataColumns: x:int, y:decimal(7,2)/DECIMAL_64
+              partitionColumnCount: 0
+              scratchColumnTypeNames: [decimal(7,2)]
+      Local Work:
+        Map Reduce Local Work
+      Reduce Vectorization:
+          enabled: false
+          enableConditionsMet: hive.vectorized.execution.reduce.enabled IS true
+          enableConditionsNotMet: hive.execution.engine mr IN [tez, spark] IS false
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          mode: mergepartial
+          outputColumnNames: _col0
+          Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
diff --git a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
index 018676f..1ced256 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_mapjoin.q.out
@@ -727,7 +727,7 @@ STAGE PLANS:
                     1 _col0 (type: decimal(16,2))
                   Map Join Vectorization:
                       bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                      bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2)
+                      bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -760,7 +760,7 @@ STAGE PLANS:
               includeColumns: [0]
               dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
               partitionColumnCount: 0
-              scratchColumnTypeNames: [decimal(16,2), decimal(16,2)]
+              scratchColumnTypeNames: [decimal(16,2)]
       Local Work:
         Map Reduce Local Work
 
@@ -976,7 +976,7 @@ STAGE PLANS:
                     1 _col0 (type: decimal(16,2))
                   Map Join Vectorization:
                       bigTableKeyExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 3:decimal(16,2)
-                      bigTableValueExpressions: ConvertDecimal64ToDecimal(col 0:decimal(16,2)/DECIMAL_64) -> 4:decimal(16,2), ConvertDecimal64ToDecimal(col 1:decimal(14,2)/DECIMAL_64) -> 5:decimal(14,2)
+                      bigTableValueExpressions: col 0:decimal(16,2)/DECIMAL_64, col 1:decimal(14,2)/DECIMAL_64
                       className: VectorMapJoinOperator
                       native: false
                       nativeConditionsMet: hive.mapjoin.optimized.hashtable IS true, hive.vectorized.execution.mapjoin.native.enabled IS true, One MapJoin Condition IS true, No nullsafe IS true, Small table vectorizes IS true
@@ -1009,7 +1009,7 @@ STAGE PLANS:
               includeColumns: [0, 1]
               dataColumns: dec:decimal(14,2)/DECIMAL_64, value_dec:decimal(14,2)/DECIMAL_64
               partitionColumnCount: 0
-              scratchColumnTypeNames: [decimal(16,2), decimal(16,2), decimal(14,2), decimal(14,0)]
+              scratchColumnTypeNames: [decimal(16,2), decimal(14,0)]
       Local Work:
         Map Reduce Local Work