You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jc...@apache.org on 2020/02/19 17:36:18 UTC
[hive] branch master updated: HIVE-22589: Add storage support for ProlepticCalendar in ORC, Parquet, and Avro (Jesus Camacho Rodriguez, reviewed by David Lavati, László Bodor, Prasanth Jayachandran)
This is an automated email from the ASF dual-hosted git repository.
jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 335c2b6 HIVE-22589: Add storage support for ProlepticCalendar in ORC, Parquet, and Avro (Jesus Camacho Rodriguez, reviewed by David Lavati, László Bodor, Prasanth Jayachandran)
335c2b6 is described below
commit 335c2b6832c1542294a224fae2404aea690a5944
Author: Jesus Camacho Rodriguez <jc...@apache.org>
AuthorDate: Wed Dec 4 13:41:13 2019 -0800
HIVE-22589: Add storage support for ProlepticCalendar in ORC, Parquet, and Avro (Jesus Camacho Rodriguez, reviewed by David Lavati, László Bodor, Prasanth Jayachandran)
Close apache/hive#862
---
.../hadoop/hive/common/type/CalendarUtils.java | 183 +++++++++++++++++++++
.../java/org/apache/hadoop/hive/conf/HiveConf.java | 12 ++
data/files/avro_date.txt | 4 +
data/files/avro_legacy_mixed_dates.avro | Bin 0 -> 236 bytes
data/files/avro_legacy_mixed_timestamps.avro | Bin 0 -> 282 bytes
data/files/avro_timestamp.txt | 6 +-
data/files/orc_legacy_mixed_dates.orc | Bin 0 -> 213 bytes
data/files/orc_legacy_mixed_timestamps.orc | Bin 0 -> 276 bytes
data/files/parquet_legacy_mixed_dates.parq | Bin 0 -> 245 bytes
data/files/parquet_legacy_mixed_timestamps.parq | Bin 0 -> 359 bytes
.../test/resources/testconfiguration.properties | 19 +++
.../io/decode/GenericColumnVectorProducer.java | 6 +
.../llap/io/decode/OrcEncodedDataConsumer.java | 8 +-
.../hive/llap/io/encoded/OrcEncodedDataReader.java | 3 +-
.../llap/io/metadata/ConsumerFileMetadata.java | 2 +
.../hive/llap/io/metadata/OrcFileMetadata.java | 8 +
.../metastore/filemeta/OrcFileMetadataHandler.java | 2 +-
pom.xml | 2 +-
ql/pom.xml | 1 +
.../hive/ql/exec/vector/VectorizedBatchUtil.java | 8 +-
.../hive/ql/io/avro/AvroContainerOutputFormat.java | 3 +
.../hive/ql/io/avro/AvroGenericRecordReader.java | 26 ++-
.../hadoop/hive/ql/io/orc/ExternalCache.java | 4 +-
.../org/apache/hadoop/hive/ql/io/orc/OrcFile.java | 11 ++
.../hadoop/hive/ql/io/orc/OrcFileFormatProxy.java | 11 +-
.../hadoop/hive/ql/io/orc/OrcInputFormat.java | 7 +-
.../hadoop/hive/ql/io/orc/RecordReaderImpl.java | 3 +-
.../apache/hadoop/hive/ql/io/orc/WriterImpl.java | 5 +-
.../ql/io/parquet/ParquetRecordReaderBase.java | 8 +
.../hive/ql/io/parquet/convert/ETypeConverter.java | 10 +-
.../io/parquet/read/DataWritableReadSupport.java | 36 ++++
.../ql/io/parquet/timestamp/NanoTimeUtils.java | 6 +-
.../parquet/vector/BaseVectorizedColumnReader.java | 3 +
.../parquet/vector/VectorizedListColumnReader.java | 6 +-
.../vector/VectorizedParquetRecordReader.java | 20 +--
.../vector/VectorizedPrimitiveColumnReader.java | 52 +++++-
.../io/parquet/write/DataWritableWriteSupport.java | 8 +-
.../ql/io/parquet/write/DataWritableWriter.java | 10 +-
.../hive/ql/io/sarg/ConvertAstToSearchArg.java | 18 +-
.../ql/optimizer/FixedBucketPruningOptimizer.java | 5 +-
.../vector/util/batchgen/VectorBatchGenerator.java | 6 +-
.../hive/ql/io/orc/TestInputOutputFormat.java | 6 +-
.../apache/hadoop/hive/ql/io/orc/TestOrcFile.java | 3 +-
.../hive/ql/io/orc/encoded/TestEncodedOrcFile.java | 15 +-
.../hive/ql/io/parquet/TestDataWritableWriter.java | 2 +-
.../parquet/serde/TestParquetTimestampUtils.java | 37 +++--
.../hive/ql/io/sarg/TestConvertAstToSearchArg.java | 2 +-
.../clientpositive/avro_hybrid_mixed_date.q | 22 +++
.../clientpositive/avro_hybrid_mixed_timestamp.q | 22 +++
.../clientpositive/avro_legacy_mixed_date.q | 14 ++
.../clientpositive/avro_legacy_mixed_timestamp.q | 14 ++
.../clientpositive/avro_proleptic_mixed_date.q | 24 +++
.../avro_proleptic_mixed_timestamp.q | 24 +++
...ge_allowincompatible_vectorization_false_date.q | 6 +
..._allowincompatible_vectorization_false_date2.q} | 16 +-
...e_allowincompatible_vectorization_false_date3.q | 21 +++
.../queries/clientpositive/orc_hybrid_mixed_date.q | 20 +++
.../clientpositive/orc_hybrid_mixed_timestamp.q | 20 +++
.../queries/clientpositive/orc_legacy_mixed_date.q | 12 ++
.../clientpositive/orc_legacy_mixed_timestamp.q | 12 ++
.../clientpositive/orc_ppd_schema_evol_3a.q | 8 +-
.../clientpositive/orc_proleptic_mixed_date.q | 22 +++
.../clientpositive/orc_proleptic_mixed_timestamp.q | 22 +++
.../clientpositive/orc_schema_evolution_float.q | 8 +-
.../clientpositive/parquet_hybrid_mixed_date.q | 20 +++
.../parquet_hybrid_mixed_timestamp.q | 16 ++
.../clientpositive/parquet_legacy_mixed_date.q | 12 ++
.../parquet_legacy_mixed_timestamp.q | 8 +
.../test/queries/clientpositive/parquet_ppd_date.q | 28 ++++
.../clientpositive/parquet_proleptic_mixed_date.q | 22 +++
.../acid_bloom_filter_orc_file_dump.q.out | 6 +-
.../results/clientpositive/acid_nullscan.q.out | 4 +-
.../test/results/clientpositive/acid_stats2.q.out | 18 +-
.../results/clientpositive/acid_table_stats.q.out | 12 +-
.../results/clientpositive/autoColumnStats_4.q.out | 4 +-
ql/src/test/results/clientpositive/avro_date.q.out | 16 ++
.../avro_schema_evolution_native.q.out | 4 +-
.../results/clientpositive/avro_timestamp.q.out | 16 ++
.../beeline/materialized_view_create_rewrite.q.out | 4 +-
.../clientpositive/cbo_ppd_non_deterministic.q.out | 4 +-
.../columnStatsUpdateForStatsOptimizer_2.q.out | 6 +-
.../results/clientpositive/deleteAnalyze.q.out | 2 +-
.../extrapolate_part_stats_date.q.out | 8 +-
.../extrapolate_part_stats_full.q.out | 24 +--
.../extrapolate_part_stats_partial.q.out | 76 ++++-----
.../clientpositive/llap/acid_bucket_pruning.q.out | 4 +-
.../clientpositive/llap/acid_no_buckets.q.out | 4 +-
.../clientpositive/llap/alter_merge_orc.q.out | 24 +--
.../llap/alter_merge_stats_orc.q.out | 22 +--
.../llap/avro_hybrid_mixed_date.q.out | 79 +++++++++
.../llap/avro_hybrid_mixed_timestamp.q.out | 79 +++++++++
.../llap/avro_legacy_mixed_date.q.out | 62 +++++++
.../llap/avro_legacy_mixed_timestamp.q.out | 62 +++++++
.../llap/avro_proleptic_mixed_date.q.out | 79 +++++++++
.../llap/avro_proleptic_mixed_timestamp.q.out | 79 +++++++++
.../clientpositive/llap/bucket_map_join_tez2.q.out | 4 +-
...llowincompatible_vectorization_false_date.q.out | 31 ++++
...lowincompatible_vectorization_false_date2.q.out | 53 ++++++
...lowincompatible_vectorization_false_date3.q.out | 53 ++++++
.../llap/column_table_stats_orc.q.out | 30 ++--
.../clientpositive/llap/default_constraint.q.out | 14 +-
.../clientpositive/llap/deleteAnalyze.q.out | 2 +-
.../llap/dynamic_semijoin_reduction.q.out | 6 +-
.../llap/dynamic_semijoin_user_level.q.out | 6 +-
.../llap/dynpart_sort_opt_vectorization.q.out | 16 +-
.../llap/dynpart_sort_optimization2.q.out | 8 +-
.../llap/dynpart_sort_optimization_acid.q.out | 2 +-
.../llap/extrapolate_part_stats_partial_ndv.q.out | 38 ++---
.../insert_values_orig_table_use_metadata.q.out | 6 +-
.../clientpositive/llap/llap_nullscan.q.out | 4 +-
.../llap/materialized_view_create.q.out | 6 +-
.../llap/materialized_view_create_rewrite.q.out | 4 +-
.../llap/materialized_view_create_rewrite_4.q.out | 12 +-
.../llap/materialized_view_create_rewrite_5.q.out | 2 +-
.../materialized_view_create_rewrite_dummy.q.out | 4 +-
...materialized_view_create_rewrite_multi_db.q.out | 4 +-
...erialized_view_create_rewrite_time_window.q.out | 6 +-
...ialized_view_create_rewrite_time_window_2.q.out | 6 +-
.../llap/materialized_view_describe.q.out | 6 +-
.../llap/materialized_view_drop.q.out | 6 +-
.../llap/materialized_view_partition_cluster.q.out | 6 +-
.../llap/materialized_view_partitioned.q.out | 2 +-
.../results/clientpositive/llap/orc_analyze.q.out | 34 ++--
.../llap/orc_hybrid_mixed_date.q.out | 75 +++++++++
.../llap/orc_hybrid_mixed_timestamp.q.out | 75 +++++++++
.../llap/orc_legacy_mixed_date.q.out | 58 +++++++
.../llap/orc_legacy_mixed_timestamp.q.out | 58 +++++++
.../clientpositive/llap/orc_llap_counters.q.out | 2 +-
.../clientpositive/llap/orc_llap_counters1.q.out | 2 +-
.../clientpositive/llap/orc_llap_nonvector.q.out | 2 +-
.../results/clientpositive/llap/orc_merge1.q.out | 16 +-
.../results/clientpositive/llap/orc_merge10.q.out | 14 +-
.../results/clientpositive/llap/orc_merge11.q.out | 9 +-
.../results/clientpositive/llap/orc_merge2.q.out | 2 +-
.../results/clientpositive/llap/orc_merge3.q.out | 2 +-
.../results/clientpositive/llap/orc_merge4.q.out | 6 +-
.../clientpositive/llap/orc_ppd_basic.q.out | 4 +-
.../llap/orc_ppd_schema_evol_3a.q.out | 8 +-
.../llap/orc_proleptic_mixed_date.q.out | 75 +++++++++
.../llap/orc_proleptic_mixed_timestamp.q.out | 75 +++++++++
.../llap/parquet_hybrid_mixed_date.q.out | 75 +++++++++
.../llap/parquet_hybrid_mixed_timestamp.q.out | 59 +++++++
.../llap/parquet_legacy_mixed_date.q.out | 58 +++++++
.../llap/parquet_legacy_mixed_timestamp.q.out | 42 +++++
.../llap/parquet_proleptic_mixed_date.q.out | 75 +++++++++
.../llap/schema_evol_orc_acid_part.q.out | 12 +-
.../llap/schema_evol_orc_acid_part_llap_io.q.out | 12 +-
.../llap/schema_evol_orc_acid_table.q.out | 12 +-
.../llap/schema_evol_orc_acid_table_llap_io.q.out | 12 +-
.../schema_evol_orc_acidvec_part_llap_io.q.out | 12 +-
.../llap/schema_evol_orc_acidvec_table.q.out | 12 +-
.../schema_evol_orc_acidvec_table_llap_io.q.out | 12 +-
.../llap/schema_evol_orc_nonvec_part.q.out | 12 +-
.../schema_evol_orc_nonvec_part_all_complex.q.out | 16 +-
..._evol_orc_nonvec_part_all_complex_llap_io.q.out | 16 +-
...schema_evol_orc_nonvec_part_all_primitive.q.out | 20 +--
...vol_orc_nonvec_part_all_primitive_llap_io.q.out | 20 +--
.../llap/schema_evol_orc_nonvec_part_llap_io.q.out | 12 +-
.../llap/schema_evol_orc_nonvec_table.q.out | 12 +-
.../schema_evol_orc_nonvec_table_llap_io.q.out | 12 +-
.../llap/schema_evol_orc_vec_part.q.out | 12 +-
.../schema_evol_orc_vec_part_all_complex.q.out | 16 +-
...ema_evol_orc_vec_part_all_complex_llap_io.q.out | 16 +-
.../schema_evol_orc_vec_part_all_primitive.q.out | 20 +--
...a_evol_orc_vec_part_all_primitive_llap_io.q.out | 20 +--
.../llap/schema_evol_orc_vec_table.q.out | 12 +-
.../llap/schema_evol_orc_vec_table_llap_io.q.out | 12 +-
.../clientpositive/llap/schema_evol_stats.q.out | 6 +-
.../clientpositive/llap/sqlmerge_stats.q.out | 6 +-
.../llap/tez_fixed_bucket_pruning.q.out | 8 +-
.../llap/vectorization_short_regress.q.out | 4 +-
.../test/results/clientpositive/masking_mv.q.out | 4 +-
.../materialized_view_create_acid.q.out | 2 +-
.../results/clientpositive/orc_file_dump.q.out | 9 +-
.../test/results/clientpositive/orc_merge10.q.out | 6 +-
.../test/results/clientpositive/orc_merge11.q.out | 9 +-
.../test/results/clientpositive/orc_merge12.q.out | 3 +-
.../orc_schema_evolution_float.q.out | 28 ++--
.../results/clientpositive/parquet_ppd_date.q.out | 116 +++++++++++++
.../results/clientpositive/parquet_stats.q.out | 2 +-
ql/src/test/results/clientpositive/row__id.q.out | 18 +-
.../clientpositive/spark/alter_merge_orc.q.out | 24 +--
.../spark/alter_merge_stats_orc.q.out | 22 +--
.../spark/bucket_map_join_tez2.q.out | 4 +-
.../results/clientpositive/spark/orc_merge1.q.out | 8 +-
.../results/clientpositive/spark/orc_merge2.q.out | 2 +-
.../results/clientpositive/spark/orc_merge3.q.out | 2 +-
.../results/clientpositive/spark/orc_merge4.q.out | 6 +-
.../results/clientpositive/spark/orc_merge5.q.out | 8 +-
.../results/clientpositive/spark/orc_merge6.q.out | 16 +-
.../results/clientpositive/spark/orc_merge7.q.out | 16 +-
.../results/clientpositive/spark/orc_merge8.q.out | 10 +-
.../results/clientpositive/spark/orc_merge9.q.out | 12 +-
.../clientpositive/spark/orc_merge_incompat1.q.out | 20 +--
.../clientpositive/spark/orc_merge_incompat2.q.out | 24 +--
.../spark/vectorization_short_regress.q.out | 24 +--
.../results/clientpositive/stats_nonpart.q.out | 2 +-
.../test/results/clientpositive/stats_part.q.out | 8 +-
.../test/results/clientpositive/stats_part2.q.out | 28 ++--
.../results/clientpositive/stats_sizebug.q.out | 4 +-
.../tez/acid_vectorization_original_tez.q.out | 24 +--
.../results/clientpositive/tez/orc_merge12.q.out | 3 +-
.../clientpositive/type_change_test_fraction.q.out | 64 +++----
.../type_change_test_fraction_vectorized.q.out | 64 +++----
.../results/clientpositive/typechangetest.q.out | 72 ++++----
.../hadoop/hive/serde2/avro/AvroDeserializer.java | 63 +++++--
.../serde2/avro/AvroGenericRecordWritable.java | 9 +-
.../apache/hadoop/hive/serde2/avro/AvroSerDe.java | 3 +-
.../hadoop/hive/serde2/avro/AvroSerializer.java | 23 ++-
.../hive/serde2/avro/TestAvroDeserializer.java | 2 +-
.../hadoop/hive/metastore/FileFormatProxy.java | 4 +-
.../hadoop/hive/metastore/FileMetadataHandler.java | 2 +-
.../ql/exec/vector/TestStructColumnVector.java | 2 +
213 files changed, 3140 insertions(+), 809 deletions(-)
diff --git a/common/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java b/common/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java
new file mode 100644
index 0000000..9b491d0
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/type/CalendarUtils.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.common.type;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+
/**
 * Conversion utilities from the hybrid Julian/Gregorian calendar to/from the
 * proleptic Gregorian.
 *
 * <p>The semantics here are to hold the string representation constant and
 * change the epoch offset rather than holding the instant in time constant
 * and changing the string representation.
 *
 * <p>These utilities will be fast for the common case (&gt; 1582 AD), but
 * slow for old dates, which round-trip through a string representation.
 */
public class CalendarUtils {

  /**
   * Build a formatter bound to the given calendar system.
   * The calendar is cloned so each thread-local formatter owns its own
   * Calendar instance; SimpleDateFormat mutates its calendar during
   * format/parse, so sharing the static HYBRID/PROLEPTIC instances across
   * threads would be a data race.
   */
  private static SimpleDateFormat createFormatter(String fmt,
                                                  GregorianCalendar calendar) {
    SimpleDateFormat result = new SimpleDateFormat(fmt);
    result.setCalendar((GregorianCalendar) calendar.clone());
    return result;
  }

  private static final String DATE = "yyyy-MM-dd";
  private static final String TIME = DATE + " HH:mm:ss.SSS";
  private static final TimeZone UTC = TimeZone.getTimeZone("UTC");

  // Hybrid Julian/Gregorian (the java.util.GregorianCalendar default):
  // Julian rules before 1582-10-15, Gregorian rules from then on.
  private static final GregorianCalendar HYBRID = new GregorianCalendar();
  // SimpleDateFormat is not thread-safe, hence one formatter per thread.
  private static final ThreadLocal<SimpleDateFormat> HYBRID_DATE_FORMAT =
      ThreadLocal.withInitial(() -> createFormatter(DATE, HYBRID));
  private static final ThreadLocal<SimpleDateFormat> HYBRID_TIME_FORMAT =
      ThreadLocal.withInitial(() -> createFormatter(TIME, HYBRID));

  // Point from which the two calendars agree, in epoch millis and epoch days.
  private static final long SWITCHOVER_MILLIS;
  private static final long SWITCHOVER_DAYS;

  // Proleptic Gregorian: Gregorian rules extended backwards before 1582.
  private static final GregorianCalendar PROLEPTIC = new GregorianCalendar();
  private static final ThreadLocal<SimpleDateFormat> PROLEPTIC_DATE_FORMAT =
      ThreadLocal.withInitial(() -> createFormatter(DATE, PROLEPTIC));
  private static final ThreadLocal<SimpleDateFormat> PROLEPTIC_TIME_FORMAT =
      ThreadLocal.withInitial(() -> createFormatter(TIME, PROLEPTIC));

  static {
    HYBRID.setTimeZone(UTC);
    PROLEPTIC.setTimeZone(UTC);
    PROLEPTIC.setGregorianChange(new Date(Long.MIN_VALUE));

    // Get the first day from which the two calendars agree with each other.
    try {
      SWITCHOVER_MILLIS = HYBRID_DATE_FORMAT.get().parse("1582-10-15").getTime();
      SWITCHOVER_DAYS = TimeUnit.MILLISECONDS.toDays(SWITCHOVER_MILLIS);
    } catch (ParseException e) {
      throw new IllegalArgumentException("Can't parse switch over date", e);
    }
  }

  /**
   * Convert an epoch day from the hybrid Julian/Gregorian calendar to the
   * proleptic Gregorian.
   * @param hybrid day of epoch in the hybrid Julian/Gregorian
   * @return day of epoch in the proleptic Gregorian
   */
  public static int convertDateToProleptic(int hybrid) {
    int proleptic = hybrid;
    if (hybrid < SWITCHOVER_DAYS) {
      // Keep the printed date constant: render in one calendar, reparse in
      // the other, and let the epoch offset shift.
      String dateStr = HYBRID_DATE_FORMAT.get().format(
          new Date(TimeUnit.DAYS.toMillis(hybrid)));
      try {
        proleptic = (int) TimeUnit.MILLISECONDS.toDays(
            PROLEPTIC_DATE_FORMAT.get().parse(dateStr).getTime());
      } catch (ParseException e) {
        throw new IllegalArgumentException("Can't parse " + dateStr, e);
      }
    }
    return proleptic;
  }

  /**
   * Convert an epoch day from the proleptic Gregorian calendar to the hybrid
   * Julian/Gregorian.
   * @param proleptic day of epoch in the proleptic Gregorian
   * @return day of epoch in the hybrid Julian/Gregorian
   */
  public static int convertDateToHybrid(int proleptic) {
    int hybrid = proleptic;
    if (proleptic < SWITCHOVER_DAYS) {
      String dateStr = PROLEPTIC_DATE_FORMAT.get().format(
          new Date(TimeUnit.DAYS.toMillis(proleptic)));
      try {
        hybrid = (int) TimeUnit.MILLISECONDS.toDays(
            HYBRID_DATE_FORMAT.get().parse(dateStr).getTime());
      } catch (ParseException e) {
        throw new IllegalArgumentException("Can't parse " + dateStr, e);
      }
    }
    return hybrid;
  }

  /**
   * Convert an epoch day between calendars, if the source and target
   * calendars differ; otherwise return the value unchanged.
   * @param original day of epoch in the source calendar
   * @param fromProleptic whether the source calendar is proleptic Gregorian
   * @param toProleptic whether the target calendar is proleptic Gregorian
   * @return day of epoch in the target calendar
   */
  public static int convertDate(int original,
                                boolean fromProleptic,
                                boolean toProleptic) {
    if (fromProleptic != toProleptic) {
      return toProleptic
          ? convertDateToProleptic(original)
          : convertDateToHybrid(original);
    } else {
      return original;
    }
  }

  /**
   * Convert epoch millis between calendars, if the source and target
   * calendars differ; otherwise return the value unchanged.
   * @param original millis of epoch in the source calendar
   * @param fromProleptic whether the source calendar is proleptic Gregorian
   * @param toProleptic whether the target calendar is proleptic Gregorian
   * @return millis of epoch in the target calendar
   */
  public static long convertTime(long original,
                                 boolean fromProleptic,
                                 boolean toProleptic) {
    if (fromProleptic != toProleptic) {
      return toProleptic
          ? convertTimeToProleptic(original)
          : convertTimeToHybrid(original);
    } else {
      return original;
    }
  }

  /**
   * Convert epoch millis from the hybrid Julian/Gregorian calendar to the
   * proleptic Gregorian.
   * @param hybrid millis of epoch in the hybrid Julian/Gregorian
   * @return millis of epoch in the proleptic Gregorian
   */
  public static long convertTimeToProleptic(long hybrid) {
    long proleptic = hybrid;
    if (hybrid < SWITCHOVER_MILLIS) {
      String dateStr = HYBRID_TIME_FORMAT.get().format(new Date(hybrid));
      try {
        proleptic = PROLEPTIC_TIME_FORMAT.get().parse(dateStr).getTime();
      } catch (ParseException e) {
        throw new IllegalArgumentException("Can't parse " + dateStr, e);
      }
    }
    return proleptic;
  }

  /**
   * Convert epoch millis from the proleptic Gregorian calendar to the hybrid
   * Julian/Gregorian.
   * @param proleptic millis of epoch in the proleptic Gregorian
   * @return millis of epoch in the hybrid Julian/Gregorian
   */
  public static long convertTimeToHybrid(long proleptic) {
    long hybrid = proleptic;
    if (proleptic < SWITCHOVER_MILLIS) {
      String dateStr = PROLEPTIC_TIME_FORMAT.get().format(new Date(proleptic));
      try {
        hybrid = HYBRID_TIME_FORMAT.get().parse(dateStr).getTime();
      } catch (ParseException e) {
        throw new IllegalArgumentException("Can't parse " + dateStr, e);
      }
    }
    return hybrid;
  }

  // Utility class; never instantiated.
  private CalendarUtils() {
    throw new UnsupportedOperationException();
  }
}
\ No newline at end of file
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 0eee582..c872e69 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2011,12 +2011,24 @@ public class HiveConf extends Configuration {
HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION("hive.parquet.timestamp.skip.conversion", true,
"Current Hive implementation of parquet stores timestamps to UTC, this flag allows skipping of the conversion" +
"on reading parquet files from other tools"),
+ HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN("hive.parquet.date.proleptic.gregorian", false,
+ "Should we write date using the proleptic Gregorian calendar instead of the hybrid Julian Gregorian?\n" +
+ "Hybrid is the default."),
+ HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN_DEFAULT("hive.parquet.date.proleptic.gregorian.default", false,
+ "This value controls whether date type in Parquet files was written using the hybrid or proleptic\n" +
+ "calendar. Hybrid is the default."),
HIVE_AVRO_TIMESTAMP_SKIP_CONVERSION("hive.avro.timestamp.skip.conversion", false,
"Some older Hive implementations (pre-3.1) wrote Avro timestamps in a UTC-normalized" +
"manner, while from version 3.1 until now Hive wrote time zone agnostic timestamps. " +
"Setting this flag to true will treat legacy timestamps as time zone agnostic. Setting " +
"it to false will treat legacy timestamps as UTC-normalized. This flag will not affect " +
"timestamps written after this change."),
+ HIVE_AVRO_PROLEPTIC_GREGORIAN("hive.avro.proleptic.gregorian", false,
+ "Should we write date and timestamp using the proleptic Gregorian calendar instead of the hybrid Julian Gregorian?\n" +
+ "Hybrid is the default."),
+ HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT("hive.avro.proleptic.gregorian.default", false,
+ "This value controls whether date and timestamp type in Avro files was written using the hybrid or proleptic\n" +
+ "calendar. Hybrid is the default."),
HIVE_INT_TIMESTAMP_CONVERSION_IN_SECONDS("hive.int.timestamp.conversion.in.seconds", false,
"Boolean/tinyint/smallint/int/bigint value is interpreted as milliseconds during the timestamp conversion.\n" +
"Set this flag to true to interpret the value as seconds to be consistent with float/double." ),
diff --git a/data/files/avro_date.txt b/data/files/avro_date.txt
index 0858896..939db26 100644
--- a/data/files/avro_date.txt
+++ b/data/files/avro_date.txt
@@ -2,3 +2,7 @@
2014-02-11|baz:1981-12-16|2011-09-05
1947-02-11|baz:1921-12-16|2011-09-05
8200-02-11|baz:6981-12-16|1039-09-05
+1411-02-21|foo:0980-12-16,bar:0998-05-07|0011-09-04,1411-09-05
+1211-02-11|baz:0981-12-16|0011-09-05
+0849-02-11|baz:0921-12-16|0011-09-05
+0605-02-11|baz:0981-12-16|0039-09-05
diff --git a/data/files/avro_legacy_mixed_dates.avro b/data/files/avro_legacy_mixed_dates.avro
new file mode 100644
index 0000000..f80f6d9
Binary files /dev/null and b/data/files/avro_legacy_mixed_dates.avro differ
diff --git a/data/files/avro_legacy_mixed_timestamps.avro b/data/files/avro_legacy_mixed_timestamps.avro
new file mode 100644
index 0000000..690f5bd
Binary files /dev/null and b/data/files/avro_legacy_mixed_timestamps.avro differ
diff --git a/data/files/avro_timestamp.txt b/data/files/avro_timestamp.txt
index a989f0e..6af27ba 100644
--- a/data/files/avro_timestamp.txt
+++ b/data/files/avro_timestamp.txt
@@ -1,4 +1,8 @@
2012-02-21 07:08:09.123|foo:1980-12-16 07:08:09.123,bar:1998-05-07 07:08:09.123|2011-09-04 07:08:09.123,2011-09-05 07:08:09.123
2014-02-11 07:08:09.123|baz:1981-12-16 07:08:09.123|2011-09-05 07:08:09.123
1947-02-11 07:08:09.123|baz:1921-12-16 07:08:09.123|2011-09-05 07:08:09.123
-8200-02-11 07:08:09.123|baz:6981-12-16 07:08:09.123|1039-09-05 07:08:09.123
\ No newline at end of file
+8200-02-11 07:08:09.123|baz:6981-12-16 07:08:09.123|1039-09-05 07:08:09.123
+1412-02-21 07:08:09.123|foo:0980-12-16 07:08:09.123,bar:0998-05-07 07:08:09.123|0011-09-04 07:08:09.123,0011-09-05 07:08:09.123
+1214-02-11 07:08:09.123|baz:0981-12-16 07:08:09.123|0011-09-05 07:08:09.123
+0847-02-11 07:08:09.123|baz:0921-12-16 07:08:09.123|0011-09-05 07:08:09.123
+0600-02-11 07:08:09.123|baz:0981-12-16 07:08:09.123|0039-09-05 07:08:09.123
\ No newline at end of file
diff --git a/data/files/orc_legacy_mixed_dates.orc b/data/files/orc_legacy_mixed_dates.orc
new file mode 100644
index 0000000..94c561d
Binary files /dev/null and b/data/files/orc_legacy_mixed_dates.orc differ
diff --git a/data/files/orc_legacy_mixed_timestamps.orc b/data/files/orc_legacy_mixed_timestamps.orc
new file mode 100644
index 0000000..137fb25
Binary files /dev/null and b/data/files/orc_legacy_mixed_timestamps.orc differ
diff --git a/data/files/parquet_legacy_mixed_dates.parq b/data/files/parquet_legacy_mixed_dates.parq
new file mode 100644
index 0000000..b1dbacd
Binary files /dev/null and b/data/files/parquet_legacy_mixed_dates.parq differ
diff --git a/data/files/parquet_legacy_mixed_timestamps.parq b/data/files/parquet_legacy_mixed_timestamps.parq
new file mode 100644
index 0000000..84aaf1c
Binary files /dev/null and b/data/files/parquet_legacy_mixed_timestamps.parq differ
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 34845f0..94467a4 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -478,6 +478,12 @@ minillaplocal.query.files=\
auto_sortmerge_join_8.q,\
auto_sortmerge_join_9.q,\
avro_extschema_insert.q,\
+ avro_hybrid_mixed_date.q,\
+ avro_hybrid_mixed_timestamp.q,\
+ avro_legacy_mixed_date.q,\
+ avro_legacy_mixed_timestamp.q,\
+ avro_proleptic_mixed_date.q,\
+ avro_proleptic_mixed_timestamp.q,\
bucket4.q,\
bucket_groupby.q,\
bucket_many.q,\
@@ -500,6 +506,8 @@ minillaplocal.query.files=\
runtime_stats_hs2.q,\
bucketsortoptimize_insert_2.q,\
change_allowincompatible_vectorization_false_date.q,\
+ change_allowincompatible_vectorization_false_date2.q,\
+ change_allowincompatible_vectorization_false_date3.q,\
check_constraint.q,\
cbo_gby.q,\
cbo_join.q,\
@@ -676,12 +684,23 @@ minillaplocal.query.files=\
orc_ppd_decimal.q,\
orc_ppd_timestamp.q,\
order_null.q,\
+ parquet_hybrid_mixed_date.q,\
+ parquet_hybrid_mixed_timestamp.q,\
+ parquet_legacy_mixed_date.q,\
+ parquet_legacy_mixed_timestamp.q,\
+ parquet_proleptic_mixed_date.q,\
partition_ctas.q,\
partition_multilevels.q,\
partition_shared_scan.q,\
partition_pruning.q,\
ptf.q,\
ptf_streaming.q,\
+ orc_hybrid_mixed_date.q,\
+ orc_hybrid_mixed_timestamp.q,\
+ orc_legacy_mixed_date.q,\
+ orc_legacy_mixed_timestamp.q,\
+ orc_proleptic_mixed_date.q,\
+ orc_proleptic_mixed_timestamp.q,\
runtime_stats_merge.q,\
quotedid_smb.q,\
reducesink_dedup.q,\
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
index af853e3..1617692 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/GenericColumnVectorProducer.java
@@ -52,6 +52,7 @@ import org.apache.hive.common.util.FixedSizedObjectPool;
import org.apache.orc.CompressionKind;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcProto;
+import org.apache.orc.OrcProto.CalendarKind;
import org.apache.orc.OrcProto.ColumnEncoding;
import org.apache.orc.OrcProto.RowIndex;
import org.apache.orc.OrcProto.RowIndexEntry;
@@ -292,5 +293,10 @@ public class GenericColumnVectorProducer implements ColumnVectorProducer {
public OrcFile.Version getFileVersion() {
return null;
}
+
+ @Override
+ public CalendarKind getCalendar() {
+ return CalendarKind.JULIAN_GREGORIAN;
+ }
}
}
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
index 83931c2..1b41d4e 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcEncodedDataConsumer.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.llap.io.metadata.ConsumerStripeMetadata;
import org.apache.hadoop.hive.llap.metrics.LlapDaemonIOMetrics;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -44,6 +45,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.UnionColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.CompressionCodec;
+import org.apache.orc.OrcProto.CalendarKind;
import org.apache.orc.impl.PositionProvider;
import org.apache.hadoop.hive.ql.io.orc.encoded.Consumer;
import org.apache.hadoop.hive.ql.io.orc.encoded.EncodedTreeReaderFactory;
@@ -222,7 +224,8 @@ public class OrcEncodedDataConsumer
.setSchemaEvolution(evolution).skipCorrupt(skipCorrupt)
.writerTimeZone(stripeMetadata.getWriterTimezone())
.fileFormat(fileMetadata == null ? null : fileMetadata.getFileVersion())
- .useUTCTimestamp(true);
+ .useUTCTimestamp(true)
+ .setProlepticGregorian(fileMetadata != null && fileMetadata.getCalendar() == CalendarKind.PROLEPTIC_GREGORIAN, true);
this.batchSchemas = includes.getBatchReaderTypes(fileSchema);
StructTreeReader treeReader = EncodedTreeReaderFactory.createRootTreeReader(
batchSchemas, stripeMetadata.getEncodings(), batch, codec, context, useDecimal64ColumnVectors);
@@ -244,8 +247,9 @@ public class OrcEncodedDataConsumer
case SHORT:
case INT:
case LONG:
- case DATE:
return new LongColumnVector(batchSize);
+ case DATE:
+ return new DateColumnVector(batchSize);
case FLOAT:
case DOUBLE:
return new DoubleColumnVector(batchSize);
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index 2893870..92df717 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -82,6 +82,7 @@ import org.apache.orc.DataReader;
import org.apache.orc.OrcConf;
import org.apache.orc.OrcProto;
import org.apache.orc.OrcProto.BloomFilterIndex;
+import org.apache.orc.OrcProto.CalendarKind;
import org.apache.orc.OrcProto.FileTail;
import org.apache.orc.OrcProto.RowIndex;
import org.apache.orc.OrcProto.Stream;
@@ -798,7 +799,7 @@ public class OrcEncodedDataReader extends CallableWithNdc<Void>
sargApp = new RecordReaderImpl.SargApplier(sarg,
rowIndexStride, evolution,
OrcFile.WriterVersion.from(OrcFile.WriterImplementation.ORC_JAVA, fileMetadata.getWriterVersionNum()),
- true);
+ true, fileMetadata.getCalendar() == CalendarKind.PROLEPTIC_GREGORIAN, true);
}
boolean hasAnyData = false;
// stripeRgs should have been initialized by this time with an empty array.
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
index d6b16ef..7191e16 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/ConsumerFileMetadata.java
@@ -22,6 +22,7 @@ import java.util.List;
import org.apache.orc.CompressionKind;
import org.apache.orc.FileFormatException;
import org.apache.orc.OrcFile;
+import org.apache.orc.OrcProto.CalendarKind;
import org.apache.orc.OrcProto.Type;
import org.apache.orc.TypeDescription;
@@ -31,4 +32,5 @@ public interface ConsumerFileMetadata {
List<Type> getTypes();
TypeDescription getSchema() throws FileFormatException;
OrcFile.Version getFileVersion();
+ CalendarKind getCalendar();
}
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
index 5eb713c..5b5bde9 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/metadata/OrcFileMetadata.java
@@ -26,6 +26,7 @@ import org.apache.orc.FileFormatException;
import org.apache.orc.FileMetadata;
import org.apache.orc.OrcFile;
import org.apache.orc.OrcProto;
+import org.apache.orc.OrcProto.CalendarKind;
import org.apache.orc.OrcProto.StripeStatistics;
import org.apache.orc.OrcUtils;
import org.apache.orc.StripeInformation;
@@ -51,6 +52,7 @@ public final class OrcFileMetadata implements FileMetadata, ConsumerFileMetadata
private final long numberOfRows;
private final boolean isOriginalFormat;
private final OrcFile.Version fileVersion;
+ private final CalendarKind calendar;
public OrcFileMetadata(Object fileKey, OrcProto.Footer footer, OrcProto.PostScript ps,
List<StripeStatistics> stats, List<StripeInformation> stripes, final OrcFile.Version fileVersion) {
@@ -69,6 +71,7 @@ public final class OrcFileMetadata implements FileMetadata, ConsumerFileMetadata
this.fileStats = footer.getStatisticsList();
this.fileKey = fileKey;
this.fileVersion = fileVersion;
+ this.calendar = footer.getCalendar();
}
// FileMetadata
@@ -170,4 +173,9 @@ public final class OrcFileMetadata implements FileMetadata, ConsumerFileMetadata
public OrcFile.Version getFileVersion() {
return fileVersion;
}
+
+ @Override
+ public CalendarKind getCalendar() {
+ return calendar;
+ }
}
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
index c5757f3..92442d1 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/filemeta/OrcFileMetadataHandler.java
@@ -51,7 +51,7 @@ public class OrcFileMetadataHandler extends FileMetadataHandler {
ByteBuffer metadata = metadatas[i].duplicate(); // Duplicate to avoid modification.
SplitInfos result = null;
try {
- result = getFileFormatProxy().applySargToMetadata(sarg, metadata);
+ result = getFileFormatProxy().applySargToMetadata(sarg, metadata, conf);
} catch (IOException ex) {
LOG.error("Failed to apply SARG to metadata", ex);
metadatas[i] = null;
diff --git a/pom.xml b/pom.xml
index 1d3c30a..a005f35 100644
--- a/pom.xml
+++ b/pom.xml
@@ -192,7 +192,7 @@
<log4j2.version>2.12.1</log4j2.version>
<mariadb.version>2.5.0</mariadb.version>
<opencsv.version>2.3</opencsv.version>
- <orc.version>1.5.8</orc.version>
+ <orc.version>1.5.9</orc.version>
<mockito-core.version>1.10.19</mockito-core.version>
<powermock.version>1.7.4</powermock.version>
<mina.version>2.0.0-M5</mina.version>
diff --git a/ql/pom.xml b/ql/pom.xml
index 3632a5e..8b0c02b 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -977,6 +977,7 @@
<include>net.sf.opencsv:opencsv</include>
<include>org.apache.hive:hive-spark-client</include>
<include>org.apache.hive:hive-storage-api</include>
+ <include>org.threeten:threeten-extra</include>
<include>org.apache.orc:orc-core</include>
<include>org.apache.orc:orc-shims</include>
<include>org.apache.orc:orc-tools</include>
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
index 6cccd9e..9bb59bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
@@ -137,9 +137,10 @@ public class VectorizedBatchUtil {
case SHORT:
case INT:
case LONG:
- case DATE:
case INTERVAL_YEAR_MONTH:
return new LongColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
+ case DATE:
+ return new DateColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
case TIMESTAMP:
return new TimestampColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
case INTERVAL_DAY_TIME:
@@ -574,13 +575,14 @@ public class VectorizedBatchUtil {
return typeInfoList.toArray(new TypeInfo[0]);
}
- public static ColumnVector makeLikeColumnVector(ColumnVector source
- ) throws HiveException{
+ public static ColumnVector makeLikeColumnVector(ColumnVector source) throws HiveException{
if (source instanceof Decimal64ColumnVector) {
Decimal64ColumnVector dec64ColVector = (Decimal64ColumnVector) source;
return new Decimal64ColumnVector(dec64ColVector.vector.length,
dec64ColVector.precision,
dec64ColVector.scale);
+ } else if (source instanceof DateColumnVector) {
+ return new DateColumnVector(((DateColumnVector) source).vector.length);
} else if (source instanceof LongColumnVector) {
return new LongColumnVector(((LongColumnVector) source).vector.length);
} else if (source instanceof DoubleColumnVector) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
index be7d8b7..fd10e08 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroContainerOutputFormat.java
@@ -31,6 +31,7 @@ import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -80,6 +81,8 @@ public class AvroContainerOutputFormat
// add writer.time.zone property to file metadata
dfw.setMeta(AvroSerDe.WRITER_TIME_ZONE, TimeZone.getDefault().toZoneId().toString());
+ dfw.setMeta(AvroSerDe.WRITER_PROLEPTIC, String.valueOf(
+ HiveConf.getBoolVar(jobConf, HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN)));
dfw.create(schema, path.getFileSystem(jobConf).create(path));
return new AvroGenericRecordWriter(dfw);
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
index 1927e0e..f27cb23 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/avro/AvroGenericRecordReader.java
@@ -64,6 +64,7 @@ public class AvroGenericRecordReader implements
final private long start;
final private long stop;
private ZoneId writerTimezone;
+ private Boolean writerProleptic;
protected JobConf jobConf;
final private boolean isEmptyInput;
/**
@@ -102,6 +103,7 @@ public class AvroGenericRecordReader implements
this.recordReaderID = new UID();
this.writerTimezone = extractWriterTimezoneFromMetadata(job, split, gdr);
+ this.writerProleptic = extractWriterProlepticFromMetadata(job, split, gdr);
}
/**
@@ -171,6 +173,28 @@ public class AvroGenericRecordReader implements
return null;
}
+ /**
+ * Reads the {@link AvroSerDe#WRITER_PROLEPTIC} flag from the Avro file metadata.
+ * Returns null when the metadata is unreadable or the property is absent, so the
+ * caller can fall back to the configured default.
+ */
+ private Boolean extractWriterProlepticFromMetadata(JobConf job, FileSplit split,
+ GenericDatumReader<GenericRecord> gdr) throws IOException {
+ if (job == null || gdr == null || split == null || split.getPath() == null) {
+ return null;
+ }
+ // try-with-resources: the reader wraps an FsInput stream and must be closed,
+ // otherwise every split leaks a file handle.
+ try (DataFileReader<GenericRecord> dataFileReader =
+ new DataFileReader<GenericRecord>(new FsInput(split.getPath(), job), gdr)) {
+ byte[] meta = dataFileReader.getMeta(AvroSerDe.WRITER_PROLEPTIC);
+ if (meta != null) {
+ // Boolean.valueOf never throws, so no parse-failure handling is needed here;
+ // the previous catch of DateTimeException (copied from the timezone variant)
+ // was unreachable dead code.
+ return Boolean.valueOf(new String(meta, StandardCharsets.UTF_8));
+ }
+ } catch (IOException e) {
+ // Can't access metadata, carry on.
+ }
+ return null;
+ }
+
private boolean pathIsInPartition(Path split, Path partitionPath) {
boolean schemeless = split.toUri().getScheme() == null;
if (schemeless) {
@@ -203,7 +227,7 @@ public class AvroGenericRecordReader implements
@Override
public AvroGenericRecordWritable createValue() {
- return new AvroGenericRecordWritable(writerTimezone);
+ return new AvroGenericRecordWritable(writerTimezone, writerProleptic);
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
index bb75ebf..3139b10 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ExternalCache.java
@@ -43,6 +43,8 @@ import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId;
+import org.apache.orc.OrcConf;
+import org.apache.orc.OrcProto;
import org.apache.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -311,7 +313,7 @@ public class ExternalCache implements FooterCache {
try {
OrcTail orcTail = ReaderImpl.extractFileTail(copy, fs.getLen(), fs.getModificationTime());
// trigger lazy read of metadata to make sure serialized data is not corrupted and readable
- orcTail.getStripeStatistics();
+ orcTail.getStripeStatistics(false, false);
return orcTail;
} catch (Exception ex) {
byte[] data = new byte[bb.remaining()];
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index e246ac2..349eb25 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -68,6 +68,7 @@ public final class OrcFile extends org.apache.orc.OrcFile {
public ReaderOptions(Configuration conf) {
super(conf);
useUTCTimestamp(true);
+ convertToProlepticGregorian(true);
}
public ReaderOptions filesystem(FileSystem fs) {
@@ -94,6 +95,11 @@ public final class OrcFile extends org.apache.orc.OrcFile {
super.useUTCTimestamp(value);
return this;
}
+
+ public ReaderOptions convertToProlepticGregorian(boolean value) {
+ super.convertToProlepticGregorian(value);
+ return this;
+ }
}
public static ReaderOptions readerOptions(Configuration conf) {
@@ -331,6 +337,11 @@ public final class OrcFile extends org.apache.orc.OrcFile {
return this;
}
+ public WriterOptions setProlepticGregorian(boolean value) {
+ super.setProlepticGregorian(value);
+ return this;
+ }
+
ObjectInspector getInspector() {
return inspector;
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
index 11f27df..148b50d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFileFormatProxy.java
@@ -21,15 +21,16 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.FileFormatProxy;
import org.apache.hadoop.hive.metastore.Metastore.SplitInfo;
import org.apache.hadoop.hive.metastore.Metastore.SplitInfos;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
+import org.apache.orc.OrcConf;
import org.apache.orc.OrcProto;
import org.apache.orc.StripeInformation;
-import org.apache.orc.StripeStatistics;
import org.apache.orc.impl.OrcTail;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -40,15 +41,19 @@ public class OrcFileFormatProxy implements FileFormatProxy {
@Override
public SplitInfos applySargToMetadata(
- SearchArgument sarg, ByteBuffer fileMetadata) throws IOException {
+ SearchArgument sarg, ByteBuffer fileMetadata, Configuration conf) throws IOException {
// TODO: ideally we should store shortened representation of only the necessary fields
// in HBase; it will probably require custom SARG application code.
OrcTail orcTail = ReaderImpl.extractFileTail(fileMetadata);
OrcProto.Footer footer = orcTail.getFooter();
int stripeCount = footer.getStripesCount();
+ boolean writerUsedProlepticGregorian = footer.hasCalendar()
+ ? footer.getCalendar() == OrcProto.CalendarKind.PROLEPTIC_GREGORIAN
+ : OrcConf.PROLEPTIC_GREGORIAN_DEFAULT.getBoolean(conf);
boolean[] result = OrcInputFormat.pickStripesViaTranslatedSarg(
sarg, orcTail.getWriterVersion(),
- footer.getTypesList(), orcTail.getStripeStatistics(), stripeCount);
+ footer.getTypesList(), orcTail.getStripeStatistics(writerUsedProlepticGregorian, true),
+ stripeCount);
// For ORC case, send the boundaries of the stripes so we don't have to send the footer.
SplitInfos.Builder sb = SplitInfos.newBuilder();
List<StripeInformation> stripes = orcTail.getStripes();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 76984ab..a069032 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -123,6 +123,7 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.Ref;
import org.apache.orc.ColumnStatistics;
import org.apache.orc.FileFormatException;
+import org.apache.orc.OrcConf;
import org.apache.orc.OrcProto;
import org.apache.orc.OrcProto.Footer;
import org.apache.orc.OrcUtils;
@@ -1732,7 +1733,11 @@ public class OrcInputFormat implements InputFormat<NullWritable, OrcStruct>,
stripeStats = orcReader.getStripeStatistics();
} else {
stripes = orcTail.getStripes();
- stripeStats = orcTail.getStripeStatistics();
+ OrcProto.Footer footer = orcTail.getFooter();
+ boolean writerUsedProlepticGregorian = footer.hasCalendar()
+ ? footer.getCalendar() == OrcProto.CalendarKind.PROLEPTIC_GREGORIAN
+ : OrcConf.PROLEPTIC_GREGORIAN_DEFAULT.getBoolean(context.conf);
+ stripeStats = orcTail.getStripeStatistics(writerUsedProlepticGregorian, true);
}
fileTypes = orcTail.getTypes();
TypeDescription fileSchema = OrcUtils.convertTypeFromProtobuf(fileTypes, 0);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
index 889bd58..d0a6c6e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -433,7 +434,7 @@ public class RecordReaderImpl extends org.apache.orc.impl.RecordReaderImpl
} else {
result = (DateWritableV2) previous;
}
- int date = (int) ((LongColumnVector) vector).vector[row];
+ int date = (int) ((DateColumnVector) vector).vector[row];
result.set(date);
return result;
} else {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
index 4082c61..58a0c54 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.Decimal64ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -204,12 +205,14 @@ public class WriterImpl extends org.apache.orc.impl.WriterImpl implements Writer
case TIMESTAMP: {
TimestampColumnVector vector = (TimestampColumnVector) column;
vector.setIsUTC(true);
+ vector.setUsingProlepticCalendar(true);
vector.set(rowId, ((TimestampObjectInspector) inspector)
.getPrimitiveJavaObject(obj).toSqlTimestamp());
break;
}
case DATE: {
- LongColumnVector vector = (LongColumnVector) column;
+ DateColumnVector vector = (DateColumnVector) column;
+ vector.setUsingProlepticCalendar(true);
vector.vector[rowId] = ((DateObjectInspector) inspector)
.getPrimitiveWritableObject(obj).getDays();
break;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ParquetRecordReaderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ParquetRecordReaderBase.java
index 91a02fe..577051d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ParquetRecordReaderBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/ParquetRecordReaderBase.java
@@ -54,6 +54,7 @@ public class ParquetRecordReaderBase {
protected Path file;
protected ProjectionPusher projectionPusher;
protected boolean skipTimestampConversion = false;
+ protected Boolean skipProlepticConversion;
protected SerDeStats serDeStats;
protected JobConf jobConf;
@@ -130,6 +131,13 @@ public class ParquetRecordReaderBase {
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION)) {
skipTimestampConversion = !Strings.nullToEmpty(fileMetaData.getCreatedBy()).startsWith("parquet-mr");
}
+ skipProlepticConversion = DataWritableReadSupport
+ .getWriterDateProleptic(fileMetaData.getKeyValueMetaData());
+ if (skipProlepticConversion == null) {
+ skipProlepticConversion = HiveConf.getBoolVar(
+ conf, HiveConf.ConfVars.HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN_DEFAULT);
+ }
+
split = new ParquetInputSplit(finalPath,
splitStart,
splitLength,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
index 490b71e..6082321 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/ETypeConverter.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.io.parquet.read.DataWritableReadSupport;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.ParquetTimestampUtils;
+import org.apache.hadoop.hive.common.type.CalendarUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -706,7 +707,14 @@ public enum ETypeConverter {
return new PrimitiveConverter() {
@Override
public void addInt(final int value) {
- parent.set(index, new DateWritableV2(value));
+ Map<String, String> metadata = parent.getMetadata();
+ Boolean skipProlepticConversion = DataWritableReadSupport.getWriterDateProleptic(metadata);
+ if (skipProlepticConversion == null) {
+ skipProlepticConversion = Boolean.parseBoolean(
+ metadata.get(HiveConf.ConfVars.HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN_DEFAULT.varname));
+ }
+ parent.set(index,
+ new DateWritableV2(skipProlepticConversion ? value : CalendarUtils.convertDateToProleptic(value)));
}
};
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
index d3245fc..ba146c5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/read/DataWritableReadSupport.java
@@ -25,6 +25,7 @@ import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.parquet.convert.DataWritableRecordConverter;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
@@ -285,6 +286,25 @@ public class DataWritableReadSupport extends ReadSupport<ArrayWritable> {
}
/**
+ * Get the proleptic from some metadata, otherwise return null.
+ */
+ public static Boolean getWriterDateProleptic(Map<String, String> metadata) {
+ if (metadata == null) {
+ return null;
+ }
+ String value = metadata.get(DataWritableWriteSupport.WRITER_DATE_PROLEPTIC);
+ try {
+ if (value != null) {
+ return Boolean.valueOf(value);
+ }
+ } catch (DateTimeException e) {
+ throw new RuntimeException("Can't parse writer proleptic property stored in file metadata", e);
+ }
+
+ return null;
+ }
+
+ /**
* Return the columns which contains required nested attribute level
* E.g., given struct a:<x:int, y:int> while 'x' is required and 'y' is not, the method will return
* a pruned struct for 'a' which only contains the attribute 'x'
@@ -487,6 +507,22 @@ public class DataWritableReadSupport extends ReadSupport<ArrayWritable> {
+ "file footer's writer time zone.");
}
+ String writerProleptic = DataWritableWriteSupport.WRITER_DATE_PROLEPTIC;
+ if (!metadata.containsKey(writerProleptic)) {
+ if (keyValueMetaData.containsKey(writerProleptic)) {
+ metadata.put(writerProleptic, keyValueMetaData.get(writerProleptic));
+ }
+ } else if (!metadata.get(writerProleptic).equals(keyValueMetaData.get(writerProleptic))) {
+ throw new IllegalStateException("Metadata contains a writer proleptic property value that does not match "
+ + "file footer's value.");
+ }
+
+ String prolepticDefault = ConfVars.HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN_DEFAULT.varname;
+ if (!metadata.containsKey(prolepticDefault)) {
+ metadata.put(prolepticDefault, String.valueOf(HiveConf.getBoolVar(
+ configuration, HiveConf.ConfVars.HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN_DEFAULT)));
+ }
+
return new DataWritableRecordConverter(readContext.getRequestedSchema(), metadata, hiveTypeInfo);
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
index f1cce81..f9d0a56 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/timestamp/NanoTimeUtils.java
@@ -16,6 +16,7 @@ package org.apache.hadoop.hive.ql.io.parquet.timestamp;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.util.Calendar;
+import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
@@ -40,7 +41,10 @@ public class NanoTimeUtils {
private static Calendar getGMTCalendar() {
//Calendar.getInstance calculates the current-time needlessly, so cache an instance.
if (parquetGMTCalendar.get() == null) {
- parquetGMTCalendar.set(Calendar.getInstance(TimeZone.getTimeZone("GMT")));
+ GregorianCalendar calendar = new GregorianCalendar();
+ calendar.setTimeZone(TimeZone.getTimeZone("GMT"));
+ calendar.setGregorianChange(new Date(Long.MIN_VALUE));
+ parquetGMTCalendar.set(calendar);
}
parquetGMTCalendar.get().clear();
return parquetGMTCalendar.get();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java
index 05d85ca..8d3cb7c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/BaseVectorizedColumnReader.java
@@ -55,6 +55,7 @@ public abstract class BaseVectorizedColumnReader implements VectorizedColumnRead
protected boolean skipTimestampConversion = false;
protected ZoneId writerTimezone = null;
+ protected boolean skipProlepticConversion = false;
/**
* Total number of values read.
@@ -119,6 +120,7 @@ public abstract class BaseVectorizedColumnReader implements VectorizedColumnRead
PageReader pageReader,
boolean skipTimestampConversion,
ZoneId writerTimezone,
+ boolean skipProlepticConversion,
Type parquetType, TypeInfo hiveType) throws IOException {
this.descriptor = descriptor;
this.type = parquetType;
@@ -126,6 +128,7 @@ public abstract class BaseVectorizedColumnReader implements VectorizedColumnRead
this.maxDefLevel = descriptor.getMaxDefinitionLevel();
this.skipTimestampConversion = skipTimestampConversion;
this.writerTimezone = writerTimezone;
+ this.skipProlepticConversion = skipProlepticConversion;
this.hiveType = hiveType;
DictionaryPage dictionaryPage = pageReader.readDictionaryPage();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java
index 5d16159..6136ce0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedListColumnReader.java
@@ -49,9 +49,9 @@ public class VectorizedListColumnReader extends BaseVectorizedColumnReader {
boolean isFirstRow = true;
public VectorizedListColumnReader(ColumnDescriptor descriptor, PageReader pageReader,
- boolean skipTimestampConversion, ZoneId writerTimezone, Type type, TypeInfo hiveType)
- throws IOException {
- super(descriptor, pageReader, skipTimestampConversion, writerTimezone, type, hiveType);
+ boolean skipTimestampConversion, ZoneId writerTimezone, boolean skipProlepticConversion,
+ Type type, TypeInfo hiveType) throws IOException {
+ super(descriptor, pageReader, skipTimestampConversion, writerTimezone, skipProlepticConversion, type, hiveType);
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java
index ea6dfb8..2104746 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedParquetRecordReader.java
@@ -456,13 +456,13 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
for (int i = 0; i < types.size(); ++i) {
columnReaders[i] =
buildVectorizedParquetReader(columnTypesList.get(colsToInclude.get(i)), types.get(i),
- pages, requestedSchema.getColumns(), skipTimestampConversion, writerTimezone, 0);
+ pages, requestedSchema.getColumns(), skipTimestampConversion, writerTimezone, skipProlepticConversion, 0);
}
}
} else {
for (int i = 0; i < types.size(); ++i) {
columnReaders[i] = buildVectorizedParquetReader(columnTypesList.get(i), types.get(i), pages,
- requestedSchema.getColumns(), skipTimestampConversion, writerTimezone, 0);
+ requestedSchema.getColumns(), skipTimestampConversion, writerTimezone, skipProlepticConversion, 0);
}
}
@@ -506,6 +506,7 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
List<ColumnDescriptor> columnDescriptors,
boolean skipTimestampConversion,
ZoneId writerTimezone,
+ boolean skipProlepticConversion,
int depth) throws IOException {
List<ColumnDescriptor> descriptors =
getAllColumnDescriptorByType(depth, type, columnDescriptors);
@@ -517,8 +518,8 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
}
if (fileSchema.getColumns().contains(descriptors.get(0))) {
return new VectorizedPrimitiveColumnReader(descriptors.get(0),
- pages.getPageReader(descriptors.get(0)), skipTimestampConversion, writerTimezone, type,
- typeInfo);
+ pages.getPageReader(descriptors.get(0)), skipTimestampConversion, writerTimezone, skipProlepticConversion,
+ type, typeInfo);
} else {
// Support for schema evolution
return new VectorizedDummyColumnReader();
@@ -531,7 +532,7 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
for (int i = 0; i < fieldTypes.size(); i++) {
VectorizedColumnReader r =
buildVectorizedParquetReader(fieldTypes.get(i), types.get(i), pages, descriptors,
- skipTimestampConversion, writerTimezone, depth + 1);
+ skipTimestampConversion, writerTimezone, skipProlepticConversion, depth + 1);
if (r != null) {
fieldReaders.add(r);
} else {
@@ -549,9 +550,8 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
}
return new VectorizedListColumnReader(descriptors.get(0),
- pages.getPageReader(descriptors.get(0)), skipTimestampConversion, writerTimezone,
- getElementType(type),
- typeInfo);
+ pages.getPageReader(descriptors.get(0)), skipTimestampConversion, writerTimezone, skipProlepticConversion,
+ getElementType(type), typeInfo);
case MAP:
if (columnDescriptors == null || columnDescriptors.isEmpty()) {
throw new RuntimeException(
@@ -583,10 +583,10 @@ public class VectorizedParquetRecordReader extends ParquetRecordReaderBase
List<Type> kvTypes = groupType.getFields();
VectorizedListColumnReader keyListColumnReader = new VectorizedListColumnReader(
descriptors.get(0), pages.getPageReader(descriptors.get(0)), skipTimestampConversion,
- writerTimezone, kvTypes.get(0), typeInfo);
+ writerTimezone, skipProlepticConversion, kvTypes.get(0), typeInfo);
VectorizedListColumnReader valueListColumnReader = new VectorizedListColumnReader(
descriptors.get(1), pages.getPageReader(descriptors.get(1)), skipTimestampConversion,
- writerTimezone, kvTypes.get(1), typeInfo);
+ writerTimezone, skipProlepticConversion, kvTypes.get(1), typeInfo);
return new VectorizedMapColumnReader(keyListColumnReader, valueListColumnReader);
case UNION:
default:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
index 26ce573..62a94bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/vector/VectorizedPrimitiveColumnReader.java
@@ -15,10 +15,12 @@ package org.apache.hadoop.hive.ql.io.parquet.vector;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.common.type.CalendarUtils;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -48,10 +50,11 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
PageReader pageReader,
boolean skipTimestampConversion,
ZoneId writerTimezone,
+ boolean skipProlepticConversion,
Type type,
TypeInfo hiveType)
throws IOException {
- super(descriptor, pageReader, skipTimestampConversion, writerTimezone, type, hiveType);
+ super(descriptor, pageReader, skipTimestampConversion, writerTimezone, skipProlepticConversion, type, hiveType);
}
@Override
@@ -101,6 +104,8 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
readSmallInts(num, (LongColumnVector) column, rowId);
break;
case DATE:
+ readDate(num, (DateColumnVector) column, rowId);
+ break;
case INTERVAL_YEAR_MONTH:
case LONG:
readLongs(num, (LongColumnVector) column, rowId);
@@ -438,7 +443,34 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
}
}
+ private void readDate(
+ int total,
+ DateColumnVector c,
+ int rowId) throws IOException {
+ c.setUsingProlepticCalendar(true);
+ int left = total;
+ while (left > 0) {
+ readRepetitionAndDefinitionLevels();
+ if (definitionLevel >= maxDefLevel) {
+ c.vector[rowId] = skipProlepticConversion ?
+ dataColumn.readLong() : CalendarUtils.convertDateToProleptic((int) dataColumn.readLong());
+ if (dataColumn.isValid()) {
+ c.isNull[rowId] = false;
+ c.isRepeating = c.isRepeating && (c.vector[0] == c.vector[rowId]);
+ } else {
+ c.vector[rowId] = 0;
+ setNullValue(c, rowId);
+ }
+ } else {
+ setNullValue(c, rowId);
+ }
+ rowId++;
+ left--;
+ }
+ }
+
private void readTimestamp(int total, TimestampColumnVector c, int rowId) throws IOException {
+ c.setUsingProlepticCalendar(true);
int left = total;
while (left > 0) {
readRepetitionAndDefinitionLevels();
@@ -516,6 +548,19 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
}
break;
case DATE:
+ DateColumnVector dc = (DateColumnVector) column;
+ dc.setUsingProlepticCalendar(true);
+ for (int i = rowId; i < rowId + num; ++i) {
+ dc.vector[i] =
+ skipProlepticConversion ?
+ dictionary.readLong((int) dictionaryIds.vector[i]) :
+ CalendarUtils.convertDateToProleptic((int) dictionary.readLong((int) dictionaryIds.vector[i]));
+ if (!dictionary.isValid()) {
+ setNullValue(column, i);
+ dc.vector[i] = 0;
+ }
+ }
+ break;
case INTERVAL_YEAR_MONTH:
case LONG:
for (int i = rowId; i < rowId + num; ++i) {
@@ -597,9 +642,10 @@ public class VectorizedPrimitiveColumnReader extends BaseVectorizedColumnReader
}
break;
case TIMESTAMP:
+ TimestampColumnVector tsc = (TimestampColumnVector) column;
+ tsc.setUsingProlepticCalendar(true);
for (int i = rowId; i < rowId + num; ++i) {
- ((TimestampColumnVector) column)
- .set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
+ tsc.set(i, dictionary.readTimestamp((int) dictionaryIds.vector[i]).toSqlTimestamp());
}
break;
case INTERVAL_DAY_TIME:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
index 8acde81..f4212f44 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriteSupport.java
@@ -18,6 +18,7 @@ import java.util.Map;
import java.util.TimeZone;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
import org.apache.hive.common.util.HiveVersionInfo;
@@ -35,9 +36,11 @@ public class DataWritableWriteSupport extends WriteSupport<ParquetHiveRecord> {
public static final String PARQUET_HIVE_SCHEMA = "parquet.hive.schema";
public static final String WRITER_TIMEZONE = "writer.time.zone";
+ public static final String WRITER_DATE_PROLEPTIC = "writer.date.proleptic";
private DataWritableWriter writer;
private MessageType schema;
+ private boolean defaultDateProleptic;
public static void setSchema(final MessageType schema, final Configuration configuration) {
configuration.set(PARQUET_HIVE_SCHEMA, schema.toString());
@@ -52,12 +55,15 @@ public class DataWritableWriteSupport extends WriteSupport<ParquetHiveRecord> {
schema = getSchema(configuration);
Map<String, String> metaData = new HashMap<>();
metaData.put(WRITER_TIMEZONE, TimeZone.getDefault().toZoneId().toString());
+ defaultDateProleptic = HiveConf.getBoolVar(
+ configuration, HiveConf.ConfVars.HIVE_PARQUET_DATE_PROLEPTIC_GREGORIAN);
+ metaData.put(WRITER_DATE_PROLEPTIC, String.valueOf(defaultDateProleptic));
return new WriteContext(schema, metaData);
}
@Override
public void prepareForWrite(final RecordConsumer recordConsumer) {
- writer = new DataWritableWriter(recordConsumer, schema);
+ writer = new DataWritableWriter(recordConsumer, schema, defaultDateProleptic);
}
@Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
index bd519eb..1834008 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/write/DataWritableWriter.java
@@ -18,6 +18,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTimeUtils;
+import org.apache.hadoop.hive.common.type.CalendarUtils;
import org.apache.hadoop.hive.serde2.io.DateWritableV2;
import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -64,14 +65,17 @@ public class DataWritableWriter {
private static final Logger LOG = LoggerFactory.getLogger(DataWritableWriter.class);
protected final RecordConsumer recordConsumer;
private final GroupType schema;
+ private final boolean defaultDateProleptic;
/* This writer will be created when writing the first row in order to get
information about how to inspect the record data. */
private DataWriter messageWriter;
- public DataWritableWriter(final RecordConsumer recordConsumer, final GroupType schema) {
+ public DataWritableWriter(final RecordConsumer recordConsumer, final GroupType schema,
+ final boolean defaultDateProleptic) {
this.recordConsumer = recordConsumer;
this.schema = schema;
+ this.defaultDateProleptic = defaultDateProleptic;
}
/**
@@ -552,7 +556,9 @@ public class DataWritableWriter {
@Override
public void write(Object value) {
Date vDate = inspector.getPrimitiveJavaObject(value);
- recordConsumer.addInteger(DateWritableV2.dateToDays(vDate));
+ recordConsumer.addInteger(
+ defaultDateProleptic ? DateWritableV2.dateToDays(vDate) :
+ CalendarUtils.convertDateToHybrid(DateWritableV2.dateToDays(vDate)));
}
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
index bfabdce..764c401 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/sarg/ConvertAstToSearchArg.java
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -204,12 +205,21 @@ public class ConvertAstToSearchArg {
}
return fl.doubleValue();
case TIMESTAMP:
- if (lit instanceof org.apache.hadoop.hive.common.type.Timestamp) {
- return ((org.apache.hadoop.hive.common.type.Timestamp) lit).toSqlTimestamp();
+ final Timestamp ts;
+ if (lit instanceof Timestamp) {
+ ts = (Timestamp) lit;
+ } else if (lit instanceof org.apache.hadoop.hive.common.type.Timestamp) {
+ ts = ((org.apache.hadoop.hive.common.type.Timestamp) lit)
+ .toSqlTimestamp();
+ } else {
+ ts = org.apache.hadoop.hive.common.type.Timestamp.valueOf(lit.toString())
+ .toSqlTimestamp();
}
- return Timestamp.valueOf(lit.toString());
+ return ts;
case DATE:
- return Date.valueOf(lit.toString());
+ return new Date(
+ DateWritable.daysToMillis(
+ org.apache.hadoop.hive.common.type.Date.valueOf(lit.toString()).toEpochDay()));
case DECIMAL:
return new HiveDecimalWritable(lit.toString());
case BOOLEAN:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
index 5818e6b..4bf05f3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/FixedBucketPruningOptimizer.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -251,7 +252,9 @@ public class FixedBucketPruningOptimizer extends Transform {
// This is a bit hackish to fix mismatch between SARG and Hive types
// for Timestamp and Date. TODO: Move those types to storage-api.
if (o instanceof java.sql.Date) {
- return Date.valueOf(o.toString());
+ java.sql.Date sqlDate = (java.sql.Date)o;
+ return Date.ofEpochDay(
+ DateWritable.millisToDays(sqlDate.getTime()));
} else if (o instanceof java.sql.Timestamp) {
java.sql.Timestamp sqlTimestamp = (java.sql.Timestamp)o;
return Timestamp.ofEpochMilli(sqlTimestamp.getTime(), sqlTimestamp.getNanos());
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java
index ff88841..bb149bc 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/batchgen/VectorBatchGenerator.java
@@ -23,6 +23,7 @@ import java.util.Random;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DateColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -192,10 +193,13 @@ public class VectorBatchGenerator {
case SHORT:
case INT:
case LONG:
- case DATE:
colVector = new LongColumnVector();
break;
+ case DATE:
+ colVector = new DateColumnVector();
+ break;
+
case FLOAT:
case DOUBLE:
colVector = new DoubleColumnVector();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index e9f0b98..5f5ea4e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -2585,14 +2585,14 @@ public class TestInputOutputFormat {
assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00000",
split.getPath().toString());
assertEquals(0, split.getStart());
- assertEquals(700, split.getLength());
+ assertEquals(702, split.getLength());
split = (HiveInputFormat.HiveInputSplit) splits[1];
assertEquals("org.apache.hadoop.hive.ql.io.orc.OrcInputFormat",
split.inputFormatClassName());
assertEquals("mock:/combinationAcid/p=0/base_0000010/bucket_00001",
split.getPath().toString());
assertEquals(0, split.getStart());
- assertEquals(724, split.getLength());
+ assertEquals(726, split.getLength());
CombineHiveInputFormat.CombineHiveInputSplit combineSplit =
(CombineHiveInputFormat.CombineHiveInputSplit) splits[2];
assertEquals(BUCKETS, combineSplit.getNumPaths());
@@ -2600,7 +2600,7 @@ public class TestInputOutputFormat {
assertEquals("mock:/combinationAcid/p=1/00000" + bucket + "_0",
combineSplit.getPath(bucket).toString());
assertEquals(0, combineSplit.getOffset(bucket));
- assertEquals(251, combineSplit.getLength(bucket));
+ assertEquals(253, combineSplit.getLength(bucket));
}
String[] hosts = combineSplit.getLocations();
assertEquals(2, hosts.length);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
index 220431a..154fe12 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
@@ -1312,7 +1312,8 @@ public class TestOrcFile {
.inspector(inspector)
.stripeSize(100000)
.bufferSize(10000)
- .blockPadding(false));
+ .blockPadding(false)
+ .setProlepticGregorian(true));
OrcStruct row = new OrcStruct(2);
for (int year = minYear; year < maxYear; ++year) {
for (int ms = 1000; ms < 2000; ++ms) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/encoded/TestEncodedOrcFile.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/encoded/TestEncodedOrcFile.java
index 8264723..1ae7ab0 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/encoded/TestEncodedOrcFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/encoded/TestEncodedOrcFile.java
@@ -26,6 +26,7 @@ import org.apache.hive.common.util.MockFileSystem;
import org.apache.orc.CompressionKind;
import org.apache.orc.FileMetadata;
import org.apache.orc.OrcProto;
+import org.apache.orc.OrcProto.Footer;
import org.apache.orc.impl.OrcTail;
import org.junit.Test;
@@ -50,16 +51,16 @@ public class TestEncodedOrcFile {
conf.set("fs.defaultFS", "fmock:///");
conf.set("fs.mock.impl", FailingMockFileSystem.class.getName());
- List<OrcProto.Type> types = new ArrayList<>();
- types.add(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.BINARY).build());
- FileMetadata dummyMetadata = mock(FileMetadata.class);
- when(dummyMetadata.getTypes()).thenReturn(types);
- when(dummyMetadata.getCompressionKind()).thenReturn(CompressionKind.NONE);
+ OrcProto.FileTail tail = OrcProto.FileTail.newBuilder()
+ .setFooter(Footer.newBuilder()
+ .addTypes(OrcProto.Type.newBuilder().setKind(OrcProto.Type.Kind.BINARY).build())
+ .build())
+ .build();
OrcFile.ReaderOptions readerOptions = EncodedOrcFile.readerOptions(conf)
.filesystem(() -> {
throw new RuntimeException("Filesystem should not have been initialized");
- }).orcTail(new OrcTail(OrcProto.FileTail.getDefaultInstance(), null))
- .fileMetadata(dummyMetadata);
+ })
+ .orcTail(new OrcTail(tail, null));
// an orc reader is created, this should not cause filesystem initialization
// because orc tail is already provided and we are not making any real reads.
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
index b242392..01d9d2e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestDataWritableWriter.java
@@ -192,7 +192,7 @@ public class TestDataWritableWriter {
private void writeParquetRecord(String schema, ParquetHiveRecord record) throws SerDeException {
MessageType fileSchema = MessageTypeParser.parseMessageType(schema);
- DataWritableWriter hiveParquetWriter = new DataWritableWriter(mockRecordConsumer, fileSchema);
+ DataWritableWriter hiveParquetWriter = new DataWritableWriter(mockRecordConsumer, fileSchema, false);
hiveParquetWriter.write(record);
}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
index 9ea7850..bc5e560 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetTimestampUtils.java
@@ -15,9 +15,11 @@ package org.apache.hadoop.hive.ql.io.parquet.serde;
import java.time.ZoneId;
import java.util.Calendar;
+import java.util.GregorianCalendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
+import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.ql.io.parquet.timestamp.NanoTime;
@@ -41,12 +43,13 @@ public class TestParquetTimestampUtils {
@Test
public void testJulianDay() {
//check if May 23, 1968 is Julian Day 2440000
- Calendar cal = Calendar.getInstance();
+ GregorianCalendar cal = new GregorianCalendar();
+ cal.setTimeZone(TimeZone.getTimeZone("GMT"));
+ cal.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
cal.set(Calendar.YEAR, 1968);
cal.set(Calendar.MONTH, Calendar.MAY);
cal.set(Calendar.DAY_OF_MONTH, 23);
cal.set(Calendar.HOUR_OF_DAY, 0);
- cal.setTimeZone(TimeZone.getTimeZone("GMT"));
Timestamp ts = Timestamp.ofEpochMilli(cal.getTimeInMillis());
NanoTime nt = NanoTimeUtils.getNanoTime(ts, false);
@@ -56,12 +59,13 @@ public class TestParquetTimestampUtils {
Assert.assertEquals(tsFetched, ts);
//check if 30 Julian Days between Jan 1, 2005 and Jan 31, 2005.
- Calendar cal1 = Calendar.getInstance();
+ GregorianCalendar cal1 = new GregorianCalendar();
+ cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
+ cal1.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
cal1.set(Calendar.YEAR, 2005);
cal1.set(Calendar.MONTH, Calendar.JANUARY);
cal1.set(Calendar.DAY_OF_MONTH, 1);
cal1.set(Calendar.HOUR_OF_DAY, 0);
- cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
Timestamp ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
NanoTime nt1 = NanoTimeUtils.getNanoTime(ts1, false);
@@ -69,12 +73,13 @@ public class TestParquetTimestampUtils {
Timestamp ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
Assert.assertEquals(ts1Fetched, ts1);
- Calendar cal2 = Calendar.getInstance();
+ GregorianCalendar cal2 = new GregorianCalendar();
+ cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
+ cal2.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
cal2.set(Calendar.YEAR, 2005);
cal2.set(Calendar.MONTH, Calendar.JANUARY);
cal2.set(Calendar.DAY_OF_MONTH, 31);
cal2.set(Calendar.HOUR_OF_DAY, 0);
- cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
Timestamp ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
NanoTime nt2 = NanoTimeUtils.getNanoTime(ts2, false);
@@ -86,12 +91,13 @@ public class TestParquetTimestampUtils {
// check if 730517 Julian Days between Jan 1, 0005 and Jan 31, 2005.
// This method used to test Julian Days between Jan 1, 2005 BCE and Jan 1, 2005 CE. Since BCE
// timestamps are not supported, both dates were changed to CE.
- cal1 = Calendar.getInstance();
+ cal1 = new GregorianCalendar();
+ cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
+ cal1.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
cal1.set(Calendar.YEAR, 0005);
cal1.set(Calendar.MONTH, Calendar.JANUARY);
cal1.set(Calendar.DAY_OF_MONTH, 1);
cal1.set(Calendar.HOUR_OF_DAY, 0);
- cal1.setTimeZone(TimeZone.getTimeZone("GMT"));
ts1 = Timestamp.ofEpochMilli(cal1.getTimeInMillis());
nt1 = NanoTimeUtils.getNanoTime(ts1, false);
@@ -99,20 +105,27 @@ public class TestParquetTimestampUtils {
ts1Fetched = NanoTimeUtils.getTimestamp(nt1, false);
Assert.assertEquals(ts1Fetched, ts1);
- cal2 = Calendar.getInstance();
+ cal2 = new GregorianCalendar();
+ cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
+ cal2.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
cal2.set(Calendar.YEAR, 2005);
cal2.set(Calendar.MONTH, Calendar.JANUARY);
cal2.set(Calendar.DAY_OF_MONTH, 31);
cal2.set(Calendar.HOUR_OF_DAY, 0);
- cal2.setTimeZone(TimeZone.getTimeZone("UTC"));
ts2 = Timestamp.ofEpochMilli(cal2.getTimeInMillis());
nt2 = NanoTimeUtils.getNanoTime(ts2, false);
ts2Fetched = NanoTimeUtils.getTimestamp(nt2, false);
Assert.assertEquals(ts2Fetched, ts2);
- Assert.assertEquals(nt2.getJulianDay() - nt1.getJulianDay(), 730517);
-}
+ Assert.assertEquals(730517, nt2.getJulianDay() - nt1.getJulianDay());
+
+ Date d1 = Date.ofEpochMilli(cal1.getTimeInMillis());
+ Assert.assertEquals("0005-01-01", d1.toString());
+
+ Date d2 = Date.ofEpochMilli(cal2.getTimeInMillis());
+ Assert.assertEquals("2005-01-31", d2.toString());
+ }
@Test
public void testNanos() {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
index 08a24e5..a52426a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/sarg/TestConvertAstToSearchArg.java
@@ -2753,7 +2753,7 @@ public class TestConvertAstToSearchArg {
assertEquals(1, sarg.getLeaves().size());
PredicateLeaf leaf = sarg.getLeaves().get(0);
assertEquals(PredicateLeaf.Type.TIMESTAMP, leaf.getType());
- assertEquals("(EQUALS ts 2015-03-17 12:34:56.0)", leaf.toString());
+ assertEquals("(EQUALS ts 2015-03-17 05:34:56.0)", leaf.toString());
}
@Test
diff --git a/ql/src/test/queries/clientpositive/avro_hybrid_mixed_date.q b/ql/src/test/queries/clientpositive/avro_hybrid_mixed_date.q
new file mode 100644
index 0000000..5d9807b
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_hybrid_mixed_date.q
@@ -0,0 +1,22 @@
+create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from hybrid_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/avro_hybrid_mixed_timestamp.q b/ql/src/test/queries/clientpositive/avro_hybrid_mixed_timestamp.q
new file mode 100644
index 0000000..28fc99c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_hybrid_mixed_timestamp.q
@@ -0,0 +1,22 @@
+create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123');
+
+select * from hybrid_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/avro_legacy_mixed_date.q b/ql/src/test/queries/clientpositive/avro_legacy_mixed_date.q
new file mode 100644
index 0000000..437e432
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_legacy_mixed_date.q
@@ -0,0 +1,14 @@
+create table legacy_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+load data local inpath '../../data/files/avro_legacy_mixed_dates.avro' into table legacy_table;
+
+select * from legacy_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from legacy_table;
+
+drop table legacy_table;
diff --git a/ql/src/test/queries/clientpositive/avro_legacy_mixed_timestamp.q b/ql/src/test/queries/clientpositive/avro_legacy_mixed_timestamp.q
new file mode 100644
index 0000000..e1e6870
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_legacy_mixed_timestamp.q
@@ -0,0 +1,14 @@
+create table legacy_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+load data local inpath '../../data/files/avro_legacy_mixed_timestamps.avro' into table legacy_table;
+
+select * from legacy_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from legacy_table;
+
+drop table legacy_table;
diff --git a/ql/src/test/queries/clientpositive/avro_proleptic_mixed_date.q b/ql/src/test/queries/clientpositive/avro_proleptic_mixed_date.q
new file mode 100644
index 0000000..401f0a6
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_proleptic_mixed_date.q
@@ -0,0 +1,24 @@
+set hive.avro.proleptic.gregorian=true;
+
+create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from hybrid_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/avro_proleptic_mixed_timestamp.q b/ql/src/test/queries/clientpositive/avro_proleptic_mixed_timestamp.q
new file mode 100644
index 0000000..5a67ab5
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/avro_proleptic_mixed_timestamp.q
@@ -0,0 +1,24 @@
+set hive.avro.proleptic.gregorian=true;
+
+create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123');
+
+select * from hybrid_table;
+
+set hive.avro.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q
index 02d923b..015b648 100644
--- a/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q
+++ b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q
@@ -27,5 +27,11 @@ insert into table change_allowincompatible_vectorization_false_date partition (s
select ts from change_allowincompatible_vectorization_false_date where ts='2038-03-22 07:26:48.0' and s='aaa';
+insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0');
+
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
+
set hive.vectorized.execution.enabled=true;
select ts from change_allowincompatible_vectorization_false_date where ts='2038-03-22 07:26:48.0' and s='aaa';
+
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
diff --git a/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date2.q
similarity index 55%
copy from ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q
copy to ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date2.q
index 02d923b..934f43e 100644
--- a/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date.q
+++ b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date2.q
@@ -13,19 +13,9 @@ alter table change_allowincompatible_vectorization_false_date add partition(s='a
alter table change_allowincompatible_vectorization_false_date add partition(s='bbb');
-insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') select ctimestamp1 from alltypesorc where ctimestamp1 > '2000-01-01' limit 50;
+insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0');
-insert into table change_allowincompatible_vectorization_false_date partition (s='bbb') select ctimestamp1 from alltypesorc where ctimestamp1 < '2000-01-01' limit 50;
-
-select count(*) from change_allowincompatible_vectorization_false_date;
-
-alter table change_allowincompatible_vectorization_false_date change column ts ts timestamp;
-
-select count(*) from change_allowincompatible_vectorization_false_date;
-
-insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('2038-03-22 07:26:48.0');
-
-select ts from change_allowincompatible_vectorization_false_date where ts='2038-03-22 07:26:48.0' and s='aaa';
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
set hive.vectorized.execution.enabled=true;
-select ts from change_allowincompatible_vectorization_false_date where ts='2038-03-22 07:26:48.0' and s='aaa';
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
diff --git a/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date3.q b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date3.q
new file mode 100644
index 0000000..3b7024e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/change_allowincompatible_vectorization_false_date3.q
@@ -0,0 +1,21 @@
+--! qt:dataset:alltypesorc
+
+set hive.vectorized.execution.enabled=false;
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.tez.bucket.pruning=true;
+set hive.optimize.index.filter=true;
+set hive.metastore.disallow.incompatible.col.type.changes=false;
+
+create table change_allowincompatible_vectorization_false_date (ts timestamp) partitioned by (s string) clustered by (ts) into 32 buckets stored as orc tblproperties ('transactional'='true');
+
+alter table change_allowincompatible_vectorization_false_date add partition(s='aaa');
+
+alter table change_allowincompatible_vectorization_false_date add partition(s='bbb');
+
+insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0');
+
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
+
+set hive.vectorized.execution.enabled=true;
+select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa';
diff --git a/ql/src/test/queries/clientpositive/orc_hybrid_mixed_date.q b/ql/src/test/queries/clientpositive/orc_hybrid_mixed_date.q
new file mode 100644
index 0000000..bf71ab3
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_hybrid_mixed_date.q
@@ -0,0 +1,20 @@
+create table hybrid_table (d date)
+stored as orc;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from hybrid_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/orc_hybrid_mixed_timestamp.q b/ql/src/test/queries/clientpositive/orc_hybrid_mixed_timestamp.q
new file mode 100644
index 0000000..0fd8029
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_hybrid_mixed_timestamp.q
@@ -0,0 +1,20 @@
+create table hybrid_table (d timestamp)
+stored as orc;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123');
+
+select * from hybrid_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/orc_legacy_mixed_date.q b/ql/src/test/queries/clientpositive/orc_legacy_mixed_date.q
new file mode 100644
index 0000000..451c983
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_legacy_mixed_date.q
@@ -0,0 +1,12 @@
+create table legacy_table (d date)
+stored as orc;
+
+load data local inpath '../../data/files/orc_legacy_mixed_dates.orc' into table legacy_table;
+
+select * from legacy_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from legacy_table;
+
+drop table legacy_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/orc_legacy_mixed_timestamp.q b/ql/src/test/queries/clientpositive/orc_legacy_mixed_timestamp.q
new file mode 100644
index 0000000..6488f4d
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_legacy_mixed_timestamp.q
@@ -0,0 +1,12 @@
+create table legacy_table (ts timestamp)
+stored as orc;
+
+load data local inpath '../../data/files/orc_legacy_mixed_timestamps.orc' into table legacy_table;
+
+select * from legacy_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from legacy_table;
+
+drop table legacy_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/orc_ppd_schema_evol_3a.q b/ql/src/test/queries/clientpositive/orc_ppd_schema_evol_3a.q
index 9d79b11..072f4ea 100644
--- a/ql/src/test/queries/clientpositive/orc_ppd_schema_evol_3a.q
+++ b/ql/src/test/queries/clientpositive/orc_ppd_schema_evol_3a.q
@@ -192,16 +192,16 @@ select count(*) from orc_ppd_n3 where f = 74.72;
alter table orc_ppd_n3 change column f f double;
SET hive.optimize.index.filter=false;
-select count(*) from orc_ppd_n3 where f = 74.72;
+select count(*) from orc_ppd_n3 where f = 74.72000122070312;
SET hive.optimize.index.filter=true;
-select count(*) from orc_ppd_n3 where f = 74.72;
+select count(*) from orc_ppd_n3 where f = 74.72000122070312;
alter table orc_ppd_n3 change column f f string;
SET hive.optimize.index.filter=false;
-select count(*) from orc_ppd_n3 where f = '74.72';
+select count(*) from orc_ppd_n3 where f = '74.72000122070312';
SET hive.optimize.index.filter=true;
-select count(*) from orc_ppd_n3 where f = '74.72';
+select count(*) from orc_ppd_n3 where f = '74.72000122070312';
SET hive.optimize.index.filter=false;
-- string tests
diff --git a/ql/src/test/queries/clientpositive/orc_proleptic_mixed_date.q b/ql/src/test/queries/clientpositive/orc_proleptic_mixed_date.q
new file mode 100644
index 0000000..55aaede
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_proleptic_mixed_date.q
@@ -0,0 +1,22 @@
+set orc.proleptic.gregorian=true;
+
+create table hybrid_table (d date)
+stored as orc;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from hybrid_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/orc_proleptic_mixed_timestamp.q b/ql/src/test/queries/clientpositive/orc_proleptic_mixed_timestamp.q
new file mode 100644
index 0000000..92d91e4
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_proleptic_mixed_timestamp.q
@@ -0,0 +1,22 @@
+set orc.proleptic.gregorian=true;
+
+create table hybrid_table (d timestamp)
+stored as orc;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123');
+
+select * from hybrid_table;
+
+set orc.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q b/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q
index 0b31902..0c48504 100644
--- a/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q
+++ b/ql/src/test/queries/clientpositive/orc_schema_evolution_float.q
@@ -22,11 +22,11 @@ insert overwrite table float_orc select * from float_text;
select f from float_orc;
alter table float_orc change column f f double;
select f from float_orc;
-select f from float_orc where f=74.72;
-select f from float_orc where f=0.22;
+select f from float_orc where f=74.72000122070312;
+select f from float_orc where f=0.2199999988079071;
set hive.optimize.index.filter=true;
-select f from float_orc where f=74.72;
-select f from float_orc where f=0.22;
+select f from float_orc where f=74.72000122070312;
+select f from float_orc where f=0.2199999988079071;
alter table float_orc change column f f decimal(14,5);
select f from float_orc;
diff --git a/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_date.q b/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_date.q
new file mode 100644
index 0000000..67a0cee
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_date.q
@@ -0,0 +1,20 @@
+create table hybrid_table (d date)
+stored as parquet;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from hybrid_table;
+
+set hive.parquet.date.proleptic.gregorian.default=true;
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_timestamp.q b/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_timestamp.q
new file mode 100644
index 0000000..4c64b7e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/parquet_hybrid_mixed_timestamp.q
@@ -0,0 +1,16 @@
+create table hybrid_table (d timestamp)
+stored as parquet;
+
+INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123');
+
+select * from hybrid_table;
+
+drop table hybrid_table;
diff --git a/ql/src/test/queries/clientpositive/parquet_legacy_mixed_date.q b/ql/src/test/queries/clientpositive/parquet_legacy_mixed_date.q
new file mode 100644
index 0000000..bf2345c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/parquet_legacy_mixed_date.q
@@ -0,0 +1,12 @@
+create table legacy_table (d date)
+stored as parquet;
+
+load data local inpath '../../data/files/parquet_legacy_mixed_dates.parq' into table legacy_table;
+
+select * from legacy_table;
+
+set hive.parquet.date.proleptic.gregorian.default=true;
+
+select * from legacy_table;
+
+drop table legacy_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/parquet_legacy_mixed_timestamp.q b/ql/src/test/queries/clientpositive/parquet_legacy_mixed_timestamp.q
new file mode 100644
index 0000000..280df40
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/parquet_legacy_mixed_timestamp.q
@@ -0,0 +1,8 @@
+create table legacy_table (d timestamp)
+stored as parquet;
+
+load data local inpath '../../data/files/parquet_legacy_mixed_timestamps.parq' into table legacy_table;
+
+select * from legacy_table;
+
+drop table legacy_table;
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/parquet_ppd_date.q b/ql/src/test/queries/clientpositive/parquet_ppd_date.q
index 82085be..8027e91 100644
--- a/ql/src/test/queries/clientpositive/parquet_ppd_date.q
+++ b/ql/src/test/queries/clientpositive/parquet_ppd_date.q
@@ -103,3 +103,31 @@ select * from newtypestbl_n2 where da between '1970-02-18' and '1970-02-19';
set hive.optimize.index.filter=true;
select * from newtypestbl_n2 where da between '1970-02-18' and '1970-02-19';
+
+insert overwrite table newtypestbl_n2 select * from (select cast("apple" as char(10)), cast("bee" as varchar(10)), 0.22, cast("999-02-20" as date) from src src1 union all select cast("hello" as char(10)), cast("world" as varchar(10)), 11.22, cast("1820-02-27" as date) from src src2 limit 10) uniontbl;
+
+set hive.optimize.index.filter=false;
+select * from newtypestbl_n2 where da='999-02-20';
+
+set hive.optimize.index.filter=true;
+select * from newtypestbl_n2 where da='999-02-20';
+
+set hive.optimize.index.filter=false;
+select * from newtypestbl_n2 where da=cast('999-02-20' as date);
+
+set hive.optimize.index.filter=true;
+select * from newtypestbl_n2 where da=cast('999-02-20' as date);
+
+set hive.vectorized.execution.enabled=true;
+
+set hive.optimize.index.filter=false;
+select * from newtypestbl_n2 where da='999-02-20';
+
+set hive.optimize.index.filter=true;
+select * from newtypestbl_n2 where da='999-02-20';
+
+set hive.optimize.index.filter=false;
+select * from newtypestbl_n2 where da=cast('999-02-20' as date);
+
+set hive.optimize.index.filter=true;
+select * from newtypestbl_n2 where da=cast('999-02-20' as date);
diff --git a/ql/src/test/queries/clientpositive/parquet_proleptic_mixed_date.q b/ql/src/test/queries/clientpositive/parquet_proleptic_mixed_date.q
new file mode 100644
index 0000000..17b5448
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/parquet_proleptic_mixed_date.q
@@ -0,0 +1,22 @@
+set hive.parquet.date.proleptic.gregorian=true;
+
+create table proleptic_table (d date)
+stored as parquet;
+
+INSERT INTO proleptic_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11');
+
+select * from proleptic_table;
+
+set hive.parquet.date.proleptic.gregorian.default=true;
+
+select * from proleptic_table;
+
+drop table proleptic_table;
diff --git a/ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out b/ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
index cfbe9cc..da805b0 100644
--- a/ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
+++ b/ql/src/test/results/clientpositive/acid_bloom_filter_orc_file_dump.q.out
@@ -80,6 +80,7 @@ File Version: 0.12 with ORC_517
Rows: 1
Compression: ZLIB
Compression size: 32768
+Calendar: Julian/Gregorian
Type: struct<operation:int,originalTransaction:bigint,bucket:int,rowId:bigint,currentTransaction:bigint,row:struct<msisdn:string,imsi:varchar(20),imei:bigint,cell_id:bigint>>
Stripe Statistics:
@@ -179,7 +180,7 @@ Stripes:
Entry 0: numHashFunctions: 6 bitCount: 81472 popCount: 6 loadFactor: 0.0001 expectedFpp: 1.5953551E-25
Stripe level merge: numHashFunctions: 6 bitCount: 81472 popCount: 6 loadFactor: 0.0001 expectedFpp: 1.5953551E-25
-File length: 1203 bytes
+File length: 1205 bytes
Padding length: 0 bytes
Padding ratio: 0%
@@ -196,6 +197,7 @@ File Version: 0.12 with ORC_517
Rows: 1
Compression: ZLIB
Compression size: 32768
+Calendar: Julian/Gregorian
Type: struct<operation:int,originalTransaction:bigint,bucket:int,rowId:bigint,currentTransaction:bigint,row:struct<msisdn:string,imsi:varchar(20),imei:bigint,cell_id:bigint>>
Stripe Statistics:
@@ -295,7 +297,7 @@ Stripes:
Entry 0: numHashFunctions: 6 bitCount: 81472 popCount: 6 loadFactor: 0.0001 expectedFpp: 1.5953551E-25
Stripe level merge: numHashFunctions: 6 bitCount: 81472 popCount: 6 loadFactor: 0.0001 expectedFpp: 1.5953551E-25
-File length: 1211 bytes
+File length: 1212 bytes
Padding length: 0 bytes
Padding ratio: 0%
diff --git a/ql/src/test/results/clientpositive/acid_nullscan.q.out b/ql/src/test/results/clientpositive/acid_nullscan.q.out
index 7f548bd..0e5c241 100644
--- a/ql/src/test/results/clientpositive/acid_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/acid_nullscan.q.out
@@ -96,7 +96,7 @@ STAGE PLANS:
serialization.ddl struct acid_vectorized_n1 { i32 a, string b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
- totalSize 2596
+ totalSize 2602
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -121,7 +121,7 @@ STAGE PLANS:
serialization.ddl struct acid_vectorized_n1 { i32 a, string b}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 2596
+ totalSize 2602
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/acid_stats2.q.out b/ql/src/test/results/clientpositive/acid_stats2.q.out
index dbc35fd..45c903f 100644
--- a/ql/src/test/results/clientpositive/acid_stats2.q.out
+++ b/ql/src/test/results/clientpositive/acid_stats2.q.out
@@ -311,7 +311,7 @@ Table Parameters:
numPartitions 2
numRows 3
rawDataSize 0
- totalSize 1544
+ totalSize 1548
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -350,7 +350,7 @@ Partition Parameters:
numFiles 1
numRows 1
rawDataSize 0
- totalSize 747
+ totalSize 749
#### A masked pattern was here ####
# Storage Information
@@ -387,7 +387,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 797
+ totalSize 799
#### A masked pattern was here ####
# Storage Information
@@ -464,7 +464,7 @@ Table Parameters:
numPartitions 2
numRows 2
rawDataSize 0
- totalSize 2241
+ totalSize 2247
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -503,7 +503,7 @@ Partition Parameters:
numFiles 2
numRows 0
rawDataSize 0
- totalSize 1444
+ totalSize 1448
#### A masked pattern was here ####
# Storage Information
@@ -540,7 +540,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 797
+ totalSize 799
#### A masked pattern was here ####
# Storage Information
@@ -644,7 +644,7 @@ Table Parameters:
numPartitions 2
numRows 1
rawDataSize 0
- totalSize 2937
+ totalSize 2945
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -683,7 +683,7 @@ Partition Parameters:
numFiles 2
numRows 0
rawDataSize 0
- totalSize 1444
+ totalSize 1448
#### A masked pattern was here ####
# Storage Information
@@ -720,7 +720,7 @@ Partition Parameters:
numFiles 2
numRows 1
rawDataSize 0
- totalSize 1493
+ totalSize 1497
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/acid_table_stats.q.out b/ql/src/test/results/clientpositive/acid_table_stats.q.out
index fde167b..c4bc80e 100644
--- a/ql/src/test/results/clientpositive/acid_table_stats.q.out
+++ b/ql/src/test/results/clientpositive/acid_table_stats.q.out
@@ -97,7 +97,7 @@ Partition Parameters:
numFiles 2
numRows 1000
rawDataSize 0
- totalSize 4380
+ totalSize 4384
#### A masked pattern was here ####
# Storage Information
@@ -184,7 +184,7 @@ Partition Parameters:
numFiles 2
numRows 1000
rawDataSize 0
- totalSize 4380
+ totalSize 4384
#### A masked pattern was here ####
# Storage Information
@@ -235,7 +235,7 @@ Partition Parameters:
numFiles 2
numRows 1000
rawDataSize 0
- totalSize 4380
+ totalSize 4384
#### A masked pattern was here ####
# Storage Information
@@ -331,7 +331,7 @@ Partition Parameters:
numFiles 4
numRows 2000
rawDataSize 0
- totalSize 8761
+ totalSize 8769
#### A masked pattern was here ####
# Storage Information
@@ -380,7 +380,7 @@ Partition Parameters:
numFiles 4
numRows 2000
rawDataSize 0
- totalSize 8761
+ totalSize 8769
#### A masked pattern was here ####
# Storage Information
@@ -593,7 +593,7 @@ Partition Parameters:
numFiles 2
numRows 1000
rawDataSize 176000
- totalSize 3322
+ totalSize 3326
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
index 5efdc06..a9b5ad0 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_4.q.out
@@ -214,7 +214,7 @@ Table Parameters:
numFiles 2
numRows 10
rawDataSize 0
- totalSize 1899
+ totalSize 1903
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -259,7 +259,7 @@ Table Parameters:
numFiles 4
numRows 8
rawDataSize 0
- totalSize 3285
+ totalSize 3293
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/avro_date.q.out b/ql/src/test/results/clientpositive/avro_date.q.out
index 32501cf..ff969a1 100644
--- a/ql/src/test/results/clientpositive/avro_date.q.out
+++ b/ql/src/test/results/clientpositive/avro_date.q.out
@@ -73,6 +73,10 @@ POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+1411-02-21 {"bar":"0998-05-07","foo":"0980-12-16"} ["0011-09-04","1411-09-05"] 2 2014-09-26
+1211-02-11 {"baz":"0981-12-16"} ["0011-09-05"] 2 2014-09-26
+0849-02-11 {"baz":"0921-12-16"} ["0011-09-05"] 2 2014-09-26
+0605-02-11 {"baz":"0981-12-16"} ["0039-09-05"] 2 2014-09-26
PREHOOK: query: SELECT d, COUNT(d) FROM avro_date GROUP BY d
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_date
@@ -83,6 +87,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@avro_date
POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
#### A masked pattern was here ####
+0605-02-11 1
+0849-02-11 1
+1211-02-11 1
+1411-02-21 1
1947-02-11 1
2012-02-21 1
2014-02-11 1
@@ -100,6 +108,10 @@ POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
2012-02-21 {"bar":"1998-05-07","foo":"1980-12-16"} ["2011-09-04","2011-09-05"] 2 2014-09-26
2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
8200-02-11 {"baz":"6981-12-16"} ["1039-09-05"] 2 2014-09-26
+1411-02-21 {"bar":"0998-05-07","foo":"0980-12-16"} ["0011-09-04","1411-09-05"] 2 2014-09-26
+1211-02-11 {"baz":"0981-12-16"} ["0011-09-05"] 2 2014-09-26
+0849-02-11 {"baz":"0921-12-16"} ["0011-09-05"] 2 2014-09-26
+0605-02-11 {"baz":"0981-12-16"} ["0039-09-05"] 2 2014-09-26
PREHOOK: query: SELECT * FROM avro_date WHERE d<'2014-12-21'
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_date
@@ -113,6 +125,10 @@ POSTHOOK: Input: default@avro_date@p1=2/p2=2014-09-26
2012-02-21 {"bar":"1998-05-07","foo":"1980-12-16"} ["2011-09-04","2011-09-05"] 2 2014-09-26
2014-02-11 {"baz":"1981-12-16"} ["2011-09-05"] 2 2014-09-26
1947-02-11 {"baz":"1921-12-16"} ["2011-09-05"] 2 2014-09-26
+1411-02-21 {"bar":"0998-05-07","foo":"0980-12-16"} ["0011-09-04","1411-09-05"] 2 2014-09-26
+1211-02-11 {"baz":"0981-12-16"} ["0011-09-05"] 2 2014-09-26
+0849-02-11 {"baz":"0921-12-16"} ["0011-09-05"] 2 2014-09-26
+0605-02-11 {"baz":"0981-12-16"} ["0039-09-05"] 2 2014-09-26
PREHOOK: query: SELECT * FROM avro_date WHERE d>'8000-12-01'
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_date
diff --git a/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out b/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out
index 3ae8155..cd401be 100644
--- a/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out
+++ b/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out
@@ -107,7 +107,7 @@ Table Parameters:
numPartitions 7
numRows 8
rawDataSize 0
- totalSize 3294
+ totalSize 3455
#### A masked pattern was here ####
# Storage Information
@@ -219,7 +219,7 @@ Table Parameters:
numPartitions 7
numRows 8
rawDataSize 0
- totalSize 3294
+ totalSize 3455
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/avro_timestamp.q.out b/ql/src/test/results/clientpositive/avro_timestamp.q.out
index ca18fd9..0ac216a 100644
--- a/ql/src/test/results/clientpositive/avro_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/avro_timestamp.q.out
@@ -73,6 +73,10 @@ POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1412-02-21 07:08:09.123 {"bar":"0998-05-07 07:08:09.123","foo":"0980-12-16 07:08:09.123"} ["0011-09-04 07:08:09.123","0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1214-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0847-02-11 07:08:09.123 {"baz":"0921-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0600-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
PREHOOK: query: SELECT d, COUNT(d) FROM avro_timestamp GROUP BY d
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_timestamp
@@ -83,6 +87,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@avro_timestamp
POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
#### A masked pattern was here ####
+0600-02-11 07:08:09.123 1
+0847-02-11 07:08:09.123 1
+1214-02-11 07:08:09.123 1
+1412-02-21 07:08:09.123 1
1947-02-11 07:08:09.123 1
2012-02-21 07:08:09.123 1
2014-02-11 07:08:09.123 1
@@ -100,6 +108,10 @@ POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
2012-02-21 07:08:09.123 {"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
8200-02-11 07:08:09.123 {"baz":"6981-12-16 07:08:09.123"} ["1039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1412-02-21 07:08:09.123 {"bar":"0998-05-07 07:08:09.123","foo":"0980-12-16 07:08:09.123"} ["0011-09-04 07:08:09.123","0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1214-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0847-02-11 07:08:09.123 {"baz":"0921-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0600-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
PREHOOK: query: SELECT * FROM avro_timestamp WHERE d<'2014-12-21 07:08:09.123'
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_timestamp
@@ -113,6 +125,10 @@ POSTHOOK: Input: default@avro_timestamp@p1=2/p2=2014-09-26 07%3A08%3A09.123
2012-02-21 07:08:09.123 {"bar":"1998-05-07 07:08:09.123","foo":"1980-12-16 07:08:09.123"} ["2011-09-04 07:08:09.123","2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
2014-02-11 07:08:09.123 {"baz":"1981-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
1947-02-11 07:08:09.123 {"baz":"1921-12-16 07:08:09.123"} ["2011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1412-02-21 07:08:09.123 {"bar":"0998-05-07 07:08:09.123","foo":"0980-12-16 07:08:09.123"} ["0011-09-04 07:08:09.123","0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+1214-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0847-02-11 07:08:09.123 {"baz":"0921-12-16 07:08:09.123"} ["0011-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
+0600-02-11 07:08:09.123 {"baz":"0981-12-16 07:08:09.123"} ["0039-09-05 07:08:09.123"] 2 2014-09-26 07:08:09.123
PREHOOK: query: SELECT * FROM avro_timestamp WHERE d>'8000-12-01 07:08:09.123'
PREHOOK: type: QUERY
PREHOOK: Input: default@avro_timestamp
diff --git a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
index fd4a6c6..2932116 100644
--- a/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
+++ b/ql/src/test/results/clientpositive/beeline/materialized_view_create_rewrite.q.out
@@ -60,7 +60,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 408
-totalSize 467
+totalSize 468
#### A masked pattern was here ####
PREHOOK: query: create materialized view if not exists cmv_mat_view2_n4
as select a, c from cmv_basetable_n10 where a = 3
@@ -94,7 +94,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 232
-totalSize 332
+totalSize 334
#### A masked pattern was here ####
PREHOOK: query: explain
select a, c from cmv_basetable_n10 where a = 3
diff --git a/ql/src/test/results/clientpositive/cbo_ppd_non_deterministic.q.out b/ql/src/test/results/clientpositive/cbo_ppd_non_deterministic.q.out
index bd75d7b..d90ce88 100644
--- a/ql/src/test/results/clientpositive/cbo_ppd_non_deterministic.q.out
+++ b/ql/src/test/results/clientpositive/cbo_ppd_non_deterministic.q.out
@@ -121,7 +121,7 @@ STAGE PLANS:
TableScan
alias: testa
filterExpr: ((part1 = 'CA') and (part2 = 'ABC')) (type: boolean)
- Statistics: Num rows: 2 Data size: 4876 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 5106 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: rand() (type: double)
outputColumnNames: _col0
@@ -177,7 +177,7 @@ STAGE PLANS:
TableScan
alias: testa
filterExpr: ((part1 = 'CA') and (part2 = 'ABC')) (type: boolean)
- Statistics: Num rows: 2 Data size: 4876 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 2 Data size: 5106 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: rand() (type: double)
outputColumnNames: _col0
diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
index a2645dd..ac2e48f 100644
--- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
+++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out
@@ -42,7 +42,7 @@ Table Parameters:
numFiles 2
numRows 3
rawDataSize 24
- totalSize 567
+ totalSize 571
#### A masked pattern was here ####
# Storage Information
@@ -87,7 +87,7 @@ Table Parameters:
numFiles 2
numRows 3
rawDataSize 24
- totalSize 567
+ totalSize 571
#### A masked pattern was here ####
# Storage Information
@@ -185,7 +185,7 @@ Table Parameters:
numFiles 2
numRows 3
rawDataSize 24
- totalSize 567
+ totalSize 571
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/deleteAnalyze.q.out b/ql/src/test/results/clientpositive/deleteAnalyze.q.out
index ec4fe95..412ad95 100644
--- a/ql/src/test/results/clientpositive/deleteAnalyze.q.out
+++ b/ql/src/test/results/clientpositive/deleteAnalyze.q.out
@@ -54,7 +54,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 634
- totalSize 595
+ totalSize 598
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out
index d8831fb..e6710d5 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_date.q.out
@@ -171,7 +171,7 @@ STAGE PLANS:
serialization.ddl struct date_dim_n1 { date d_date}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 199
+ totalSize 201
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -217,7 +217,7 @@ STAGE PLANS:
serialization.ddl struct date_dim_n1 { date d_date}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 199
+ totalSize 201
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -263,7 +263,7 @@ STAGE PLANS:
serialization.ddl struct date_dim_n1 { date d_date}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 199
+ totalSize 201
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -309,7 +309,7 @@ STAGE PLANS:
serialization.ddl struct date_dim_n1 { date d_date}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 199
+ totalSize 201
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
index 1c7e223..97f31bf 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out
@@ -145,7 +145,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -191,7 +191,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 404
+ totalSize 406
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -270,7 +270,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -316,7 +316,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 404
+ totalSize 406
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -489,7 +489,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 291
+ totalSize 293
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -536,7 +536,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 293
+ totalSize 295
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -583,7 +583,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -630,7 +630,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -714,7 +714,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 291
+ totalSize 293
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -761,7 +761,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 293
+ totalSize 295
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -808,7 +808,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -855,7 +855,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
index 3ade1d8..36251de 100644
--- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
+++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out
@@ -175,7 +175,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -221,7 +221,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 405
+ totalSize 407
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -267,7 +267,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 425
+ totalSize 428
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -313,7 +313,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 432
+ totalSize 434
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -396,7 +396,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -442,7 +442,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 405
+ totalSize 407
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -488,7 +488,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 425
+ totalSize 428
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -534,7 +534,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 432
+ totalSize 434
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -645,7 +645,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -691,7 +691,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 405
+ totalSize 407
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -737,7 +737,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 425
+ totalSize 428
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -783,7 +783,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 432
+ totalSize 434
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -866,7 +866,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 383
+ totalSize 385
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -912,7 +912,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 405
+ totalSize 407
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -958,7 +958,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 425
+ totalSize 428
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1004,7 +1004,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n1 { string state, i32 locid, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 432
+ totalSize 434
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1184,7 +1184,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 296
+ totalSize 298
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1231,7 +1231,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 305
+ totalSize 307
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1278,7 +1278,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 312
+ totalSize 314
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1325,7 +1325,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 291
+ totalSize 293
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1372,7 +1372,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 286
+ totalSize 288
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1419,7 +1419,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 298
+ totalSize 300
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1466,7 +1466,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 307
+ totalSize 309
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1513,7 +1513,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1560,7 +1560,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1607,7 +1607,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 308
+ totalSize 310
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1654,7 +1654,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 305
+ totalSize 307
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1752,7 +1752,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 296
+ totalSize 298
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1799,7 +1799,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 305
+ totalSize 307
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1846,7 +1846,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 312
+ totalSize 314
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1893,7 +1893,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 291
+ totalSize 293
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1940,7 +1940,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 286
+ totalSize 288
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1987,7 +1987,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 298
+ totalSize 300
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2034,7 +2034,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 307
+ totalSize 309
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2081,7 +2081,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2128,7 +2128,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 275
+ totalSize 277
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2175,7 +2175,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 308
+ totalSize 310
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2222,7 +2222,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n1 { string state, i32 locid}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 305
+ totalSize 307
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out b/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out
index 892d291..d4aa024 100644
--- a/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/acid_bucket_pruning.q.out
@@ -112,7 +112,7 @@ STAGE PLANS:
serialization.ddl struct acidtbldefault { i32 a}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 34777
+ totalSize 34811
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -138,7 +138,7 @@ STAGE PLANS:
serialization.ddl struct acidtbldefault { i32 a}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 34777
+ totalSize 34811
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
index fbf4e48..03b8dc3 100644
--- a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
+++ b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
@@ -286,7 +286,7 @@ Table Parameters:
numPartitions 4
numRows 2003
rawDataSize 0
- totalSize 18013
+ totalSize 18027
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -398,7 +398,7 @@ Table Parameters:
numPartitions 4
numRows 2003
rawDataSize 0
- totalSize 18013
+ totalSize 18027
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out b/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out
index cf90940..571651c 100644
--- a/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out
+++ b/ql/src/test/results/clientpositive/llap/alter_merge_orc.q.out
@@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test
@@ -91,9 +91,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:1
-totalFileSize:7214
-maxFileSize:7214
-minFileSize:7214
+totalFileSize:7216
+maxFileSize:7216
+minFileSize:7216
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test
@@ -171,9 +171,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test_part_n2
@@ -218,9 +218,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:1
-totalFileSize:7214
-maxFileSize:7214
-minFileSize:7214
+totalFileSize:7216
+maxFileSize:7216
+minFileSize:7216
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test_part_n2
diff --git a/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out b/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out
index 5ed7d70..785c8c1 100644
--- a/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out
+++ b/ql/src/test/results/clientpositive/llap/alter_merge_stats_orc.q.out
@@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: desc extended src_orc_merge_test_stat
@@ -93,7 +93,7 @@ Table Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -144,7 +144,7 @@ Table Parameters:
numFiles 1
numRows 1500
rawDataSize 141000
- totalSize 7214
+ totalSize 7216
#### A masked pattern was here ####
# Storage Information
@@ -214,9 +214,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: desc formatted src_orc_merge_test_part_stat partition (ds='2011')
@@ -243,7 +243,7 @@ Partition Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -290,7 +290,7 @@ Partition Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -345,7 +345,7 @@ Partition Parameters:
numFiles 1
numRows 1500
rawDataSize 141000
- totalSize 7214
+ totalSize 7216
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_date.q.out
new file mode 100644
index 0000000..fd74c4e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_date.q.out
@@ -0,0 +1,79 @@
+PREHOOK: query: create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_timestamp.q.out
new file mode 100644
index 0000000..9861ff1
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_hybrid_mixed_timestamp.q.out
@@ -0,0 +1,79 @@
+PREHOOK: query: create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_date.q.out
new file mode 100644
index 0000000..4aec067
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_date.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: create table legacy_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/avro_legacy_mixed_dates.avro' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/avro_legacy_mixed_dates.avro' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-27
+1014-02-17
+0947-02-16
+0200-02-10
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_timestamp.q.out
new file mode 100644
index 0000000..27c6f3d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_legacy_mixed_timestamp.q.out
@@ -0,0 +1,62 @@
+PREHOOK: query: create table legacy_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/avro_legacy_mixed_timestamps.avro' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/avro_legacy_mixed_timestamps.avro' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-27 07:15:11.123
+1014-02-17 07:15:11.123
+0947-02-16 07:15:11.123
+0200-02-10 07:15:11.123
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_date.q.out
new file mode 100644
index 0000000..fd74c4e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_date.q.out
@@ -0,0 +1,79 @@
+PREHOOK: query: create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d date)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_timestamp.q.out
new file mode 100644
index 0000000..9861ff1
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/avro_proleptic_mixed_timestamp.q.out
@@ -0,0 +1,79 @@
+PREHOOK: query: create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d timestamp)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'
+COLLECTION ITEMS TERMINATED BY ',' MAP KEYS TERMINATED BY ':'
+stored as avro
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
index 057f5b4..85d2e19 100644
--- a/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucket_map_join_tez2.q.out
@@ -2535,7 +2535,7 @@ STAGE PLANS:
serialization.ddl struct my_dim { string join_col, string filter_col}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 338
+ totalSize 340
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2557,7 +2557,7 @@ STAGE PLANS:
serialization.ddl struct my_dim { string join_col, string filter_col}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 338
+ totalSize 340
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.my_dim
diff --git a/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date.q.out b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date.q.out
index f3716b1..26b3d34 100644
--- a/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date.q.out
+++ b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date.q.out
@@ -84,6 +84,26 @@ POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
#### A masked pattern was here ####
2038-03-22 07:26:48
+PREHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: Lineage: change_allowincompatible_vectorization_false_date PARTITION(s=aaa).ts SCRIPT []
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01 00:00:00
PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='2038-03-22 07:26:48.0' and s='aaa'
PREHOOK: type: QUERY
PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
@@ -95,3 +115,14 @@ POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
#### A masked pattern was here ####
2038-03-22 07:26:48
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01 00:00:00
diff --git a/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date2.q.out b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date2.q.out
new file mode 100644
index 0000000..eff5a3b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date2.q.out
@@ -0,0 +1,53 @@
+PREHOOK: query: create table change_allowincompatible_vectorization_false_date (ts date) partitioned by (s string) clustered by (ts) into 32 buckets stored as orc tblproperties ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: create table change_allowincompatible_vectorization_false_date (ts date) partitioned by (s string) clustered by (ts) into 32 buckets stored as orc tblproperties ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+PREHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='aaa')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='aaa')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+PREHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='bbb')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='bbb')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=bbb
+PREHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: Lineage: change_allowincompatible_vectorization_false_date PARTITION(s=aaa).ts SCRIPT []
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01
diff --git a/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date3.q.out b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date3.q.out
new file mode 100644
index 0000000..7cd84a2
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/change_allowincompatible_vectorization_false_date3.q.out
@@ -0,0 +1,53 @@
+PREHOOK: query: create table change_allowincompatible_vectorization_false_date (ts timestamp) partitioned by (s string) clustered by (ts) into 32 buckets stored as orc tblproperties ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: create table change_allowincompatible_vectorization_false_date (ts timestamp) partitioned by (s string) clustered by (ts) into 32 buckets stored as orc tblproperties ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+PREHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='aaa')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='aaa')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+PREHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='bbb')
+PREHOOK: type: ALTERTABLE_ADDPARTS
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: query: alter table change_allowincompatible_vectorization_false_date add partition(s='bbb')
+POSTHOOK: type: ALTERTABLE_ADDPARTS
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=bbb
+PREHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: query: insert into table change_allowincompatible_vectorization_false_date partition (s='aaa') values ('0001-01-01 00:00:00.0')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@change_allowincompatible_vectorization_false_date@s=aaa
+POSTHOOK: Lineage: change_allowincompatible_vectorization_false_date PARTITION(s=aaa).ts SCRIPT []
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01 00:00:00
+PREHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date
+PREHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+POSTHOOK: query: select ts from change_allowincompatible_vectorization_false_date where ts='0001-01-01 00:00:00.0' and s='aaa'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date
+POSTHOOK: Input: default@change_allowincompatible_vectorization_false_date@s=aaa
+#### A masked pattern was here ####
+0001-01-01 00:00:00
diff --git a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
index 7a56812..5266770 100644
--- a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
+++ b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
@@ -42,7 +42,7 @@ Table Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
@@ -127,7 +127,7 @@ STAGE PLANS:
serialization.ddl struct s_n0 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -149,7 +149,7 @@ STAGE PLANS:
serialization.ddl struct s_n0 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.s_n0
@@ -229,7 +229,7 @@ Table Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
@@ -306,7 +306,7 @@ Table Parameters:
numPartitions 2
numRows 2
rawDataSize 340
- totalSize 566
+ totalSize 570
#### A masked pattern was here ####
# Storage Information
@@ -402,7 +402,7 @@ STAGE PLANS:
serialization.ddl struct spart_n0 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -447,7 +447,7 @@ STAGE PLANS:
serialization.ddl struct spart_n0 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -566,7 +566,7 @@ Table Parameters:
numPartitions 2
numRows 2
rawDataSize 340
- totalSize 566
+ totalSize 570
#### A masked pattern was here ####
# Storage Information
@@ -604,7 +604,7 @@ Partition Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
@@ -642,7 +642,7 @@ Partition Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
@@ -723,7 +723,7 @@ Table Parameters:
numPartitions 2
numRows 2
rawDataSize 340
- totalSize 566
+ totalSize 570
#### A masked pattern was here ####
# Storage Information
@@ -816,7 +816,7 @@ STAGE PLANS:
serialization.ddl struct spart_n0 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -930,7 +930,7 @@ Table Parameters:
numPartitions 2
numRows 2
rawDataSize 340
- totalSize 566
+ totalSize 570
#### A masked pattern was here ####
# Storage Information
@@ -968,7 +968,7 @@ Partition Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
@@ -1006,7 +1006,7 @@ Partition Parameters:
numFiles 1
numRows 1
rawDataSize 170
- totalSize 283
+ totalSize 285
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/default_constraint.q.out b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
index f6e05f2..a04da4e 100644
--- a/ql/src/test/results/clientpositive/llap/default_constraint.q.out
+++ b/ql/src/test/results/clientpositive/llap/default_constraint.q.out
@@ -1556,7 +1556,7 @@ Table Type: MANAGED_TABLE
Table Parameters:
bucketing_version 2
numFiles 1
- totalSize 1099
+ totalSize 1101
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1732,7 +1732,7 @@ Table Parameters:
bucketing_version 2
#### A masked pattern was here ####
numFiles 2
- totalSize 2198
+ totalSize 2202
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1812,7 +1812,7 @@ Table Parameters:
bucketing_version 2
#### A masked pattern was here ####
numFiles 2
- totalSize 2198
+ totalSize 2202
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1999,7 +1999,7 @@ Table Parameters:
bucketing_version 2
#### A masked pattern was here ####
numFiles 3
- totalSize 3281
+ totalSize 3287
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -2078,7 +2078,7 @@ Table Parameters:
bucketing_version 2
#### A masked pattern was here ####
numFiles 3
- totalSize 3281
+ totalSize 3287
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -2158,7 +2158,7 @@ Table Parameters:
bucketing_version 2
#### A masked pattern was here ####
numFiles 3
- totalSize 3281
+ totalSize 3287
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -2758,7 +2758,7 @@ Table Type: MANAGED_TABLE
Table Parameters:
bucketing_version 2
numFiles 1
- totalSize 1099
+ totalSize 1101
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
index 832afff..030b61d 100644
--- a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
+++ b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
@@ -54,7 +54,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 634
- totalSize 595
+ totalSize 598
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out
index c1d7329..7076388 100644
--- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction.q.out
@@ -1794,7 +1794,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_date_n7 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 3052
+ totalSize 3054
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1838,7 +1838,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_date_n7 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 3052
+ totalSize 3054
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1975,7 +1975,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_small_n3 { string key1, string value1}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 469
+ totalSize 471
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out
index 28f5568..c8fa625 100644
--- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_user_level.q.out
@@ -995,7 +995,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_date_n9 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 3052
+ totalSize 3054
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1041,7 +1041,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_date_n9 { string key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 3052
+ totalSize 3054
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1183,7 +1183,7 @@ STAGE PLANS:
serialization.ddl struct srcpart_small_n4 { string key1, string value1}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 469
+ totalSize 471
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out b/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out
index c1a4922..d9dad5c 100644
--- a/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_opt_vectorization.q.out
@@ -1198,7 +1198,7 @@ Partition Parameters:
numFiles 2
numRows 32
rawDataSize 640
- totalSize 1428
+ totalSize 1432
#### A masked pattern was here ####
# Storage Information
@@ -1238,7 +1238,7 @@ Partition Parameters:
numFiles 2
numRows 6
rawDataSize 120
- totalSize 1132
+ totalSize 1136
#### A masked pattern was here ####
# Storage Information
@@ -1278,7 +1278,7 @@ Partition Parameters:
numFiles 2
numRows 14
rawDataSize 280
- totalSize 1246
+ totalSize 1250
#### A masked pattern was here ####
# Storage Information
@@ -1318,7 +1318,7 @@ Partition Parameters:
numFiles 2
numRows 6
rawDataSize 120
- totalSize 1132
+ totalSize 1136
#### A masked pattern was here ####
# Storage Information
@@ -1357,7 +1357,7 @@ Partition Parameters:
numFiles 8
numRows 32
rawDataSize 640
- totalSize 4640
+ totalSize 4656
#### A masked pattern was here ####
# Storage Information
@@ -1396,7 +1396,7 @@ Partition Parameters:
numFiles 4
numRows 6
rawDataSize 120
- totalSize 2074
+ totalSize 2082
#### A masked pattern was here ####
# Storage Information
@@ -1435,7 +1435,7 @@ Partition Parameters:
numFiles 8
numRows 32
rawDataSize 640
- totalSize 4658
+ totalSize 4674
#### A masked pattern was here ####
# Storage Information
@@ -1474,7 +1474,7 @@ Partition Parameters:
numFiles 4
numRows 6
rawDataSize 120
- totalSize 2074
+ totalSize 2082
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out
index 701573b..bb3b6c3 100644
--- a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization2.q.out
@@ -1363,7 +1363,7 @@ Partition Parameters:
numFiles 1
numRows 11
rawDataSize 88
- totalSize 464
+ totalSize 466
#### A masked pattern was here ####
# Storage Information
@@ -1421,7 +1421,7 @@ Partition Parameters:
numFiles 1
numRows 13
rawDataSize 104
- totalSize 487
+ totalSize 489
#### A masked pattern was here ####
# Storage Information
@@ -1621,7 +1621,7 @@ Partition Parameters:
numFiles 1
numRows 11
rawDataSize 88
- totalSize 464
+ totalSize 466
#### A masked pattern was here ####
# Storage Information
@@ -1679,7 +1679,7 @@ Partition Parameters:
numFiles 1
numRows 13
rawDataSize 104
- totalSize 487
+ totalSize 489
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
index 95aae72..57dbbe3 100644
--- a/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynpart_sort_optimization_acid.q.out
@@ -1426,7 +1426,7 @@ STAGE PLANS:
TableScan
alias: acid_2l_part_sdpo
filterExpr: (value = 'bar') (type: boolean)
- Statistics: Num rows: 4200 Data size: 1247197 Basic stats: COMPLETE Column stats: PARTIAL
+ Statistics: Num rows: 4200 Data size: 1247277 Basic stats: COMPLETE Column stats: PARTIAL
Filter Operator
predicate: (value = 'bar') (type: boolean)
Statistics: Num rows: 5 Data size: 1375 Basic stats: COMPLETE Column stats: PARTIAL
diff --git a/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out b/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out
index 7a8ac4d..e522926 100644
--- a/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out
+++ b/ql/src/test/results/clientpositive/llap/extrapolate_part_stats_partial_ndv.q.out
@@ -299,7 +299,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 542
+ totalSize 544
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -345,7 +345,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 581
+ totalSize 583
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -391,7 +391,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 597
+ totalSize 599
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -437,7 +437,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 620
+ totalSize 622
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -690,7 +690,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 542
+ totalSize 544
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -736,7 +736,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 581
+ totalSize 583
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -782,7 +782,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 597
+ totalSize 599
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -828,7 +828,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_1d_n0 { string state, double locid, decimal(10,0) cnt, i32 zip}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 620
+ totalSize 622
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1131,7 +1131,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 399
+ totalSize 401
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1178,7 +1178,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 424
+ totalSize 425
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1225,7 +1225,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 438
+ totalSize 439
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1272,7 +1272,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 399
+ totalSize 401
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1319,7 +1319,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 407
+ totalSize 409
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1366,7 +1366,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 402
+ totalSize 404
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1413,7 +1413,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 423
+ totalSize 425
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1460,7 +1460,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 378
+ totalSize 380
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1507,7 +1507,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 373
+ totalSize 375
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1554,7 +1554,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 427
+ totalSize 429
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1601,7 +1601,7 @@ STAGE PLANS:
serialization.ddl struct loc_orc_2d_n0 { string state, i32 locid, decimal(10,0) cnt}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 428
+ totalSize 430
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
diff --git a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
index 3c76a2c..dba4201 100644
--- a/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
+++ b/ql/src/test/results/clientpositive/llap/insert_values_orig_table_use_metadata.q.out
@@ -344,7 +344,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 1652
+ totalSize 1654
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -442,7 +442,7 @@ Table Parameters:
numFiles 2
numRows 4
rawDataSize 0
- totalSize 3304
+ totalSize 3308
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -536,7 +536,7 @@ Table Parameters:
numFiles 3
numRows 12292
rawDataSize 0
- totalSize 312862
+ totalSize 312868
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
index c8a1987..a6167b9 100644
--- a/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/llap/llap_nullscan.q.out
@@ -105,7 +105,7 @@ STAGE PLANS:
serialization.ddl struct src_orc_n1 { string key, string value, string ds, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
- totalSize 644
+ totalSize 646
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -128,7 +128,7 @@ STAGE PLANS:
serialization.ddl struct src_orc_n1 { string key, string value, string ds, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 644
+ totalSize 646
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.src_orc_n1
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out
index cb7abc6..ff5e27f 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create.q.out
@@ -50,7 +50,7 @@ Table Parameters:
numFiles 1
numRows 5
rawDataSize 1025
- totalSize 509
+ totalSize 511
#### A masked pattern was here ####
# Storage Information
@@ -112,7 +112,7 @@ Table Parameters:
numFiles 1
numRows 5
rawDataSize 580
- totalSize 354
+ totalSize 356
#### A masked pattern was here ####
# Storage Information
@@ -250,7 +250,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 5
rawDataSize 1605
-totalSize 725
+totalSize 727
#### A masked pattern was here ####
PREHOOK: query: drop materialized view cmv_mat_view_n4
PREHOOK: type: DROP_MATERIALIZED_VIEW
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
index fe8a83e..2352a87 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite.q.out
@@ -60,7 +60,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 408
-totalSize 467
+totalSize 468
#### A masked pattern was here ####
PREHOOK: query: create materialized view if not exists cmv_mat_view2_n4
as select a, c from cmv_basetable_n10 where a = 3
@@ -94,7 +94,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 232
-totalSize 332
+totalSize 334
#### A masked pattern was here ####
PREHOOK: query: explain
select a, c from cmv_basetable_n10 where a = 3
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
index 25ce6d6..41adbfa 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_4.q.out
@@ -289,7 +289,7 @@ Table Parameters:
numFiles 2
numRows 2
rawDataSize 0
- totalSize 1539
+ totalSize 1542
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -528,7 +528,7 @@ Table Parameters:
numFiles 2
numRows 2
rawDataSize 0
- totalSize 1539
+ totalSize 1542
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1011,7 +1011,7 @@ Table Parameters:
numFiles 3
numRows 3
rawDataSize 0
- totalSize 2296
+ totalSize 2301
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1312,7 +1312,7 @@ Table Parameters:
numFiles 2
numRows 3
rawDataSize 0
- totalSize 1041
+ totalSize 1047
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -1613,7 +1613,7 @@ Table Parameters:
numFiles 2
numRows 2
rawDataSize 0
- totalSize 1039
+ totalSize 1044
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -2023,7 +2023,7 @@ Table Parameters:
numFiles 3
numRows 3
rawDataSize 0
- totalSize 1795
+ totalSize 1801
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
index 0b0e49b..68e9039 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_5.q.out
@@ -387,7 +387,7 @@ Table Parameters:
numFiles 2
numRows 5
rawDataSize 0
- totalSize 1502
+ totalSize 1505
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
index c6dc4f5..54990a8 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_dummy.q.out
@@ -60,7 +60,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 408
-totalSize 467
+totalSize 468
#### A masked pattern was here ####
PREHOOK: query: create materialized view if not exists cmv_mat_view2
as select a, c from cmv_basetable_n0 where a = 3
@@ -94,7 +94,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 232
-totalSize 332
+totalSize 334
#### A masked pattern was here ####
PREHOOK: query: explain
select a, c from cmv_basetable_n0 where a = 3
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out
index d94baca..8335c77 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_multi_db.q.out
@@ -84,7 +84,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 408
-totalSize 467
+totalSize 468
#### A masked pattern was here ####
PREHOOK: query: create materialized view if not exists cmv_mat_view2_n2
as select a, c from db1.cmv_basetable_n7 where a = 3
@@ -118,7 +118,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 2
rawDataSize 232
-totalSize 332
+totalSize 334
#### A masked pattern was here ####
PREHOOK: query: create database db3
PREHOOK: type: CREATEDATABASE
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
index c33b0bb..421088f 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window.q.out
@@ -284,7 +284,7 @@ Table Parameters:
numRows 2
rawDataSize 232
rewriting.time.window 5min
- totalSize 606
+ totalSize 610
#### A masked pattern was here ####
# Storage Information
@@ -518,7 +518,7 @@ Table Parameters:
numRows 2
rawDataSize 232
rewriting.time.window 5min
- totalSize 606
+ totalSize 610
#### A masked pattern was here ####
# Storage Information
@@ -853,7 +853,7 @@ Table Parameters:
numRows 3
rawDataSize 348
rewriting.time.window 5min
- totalSize 636
+ totalSize 640
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window_2.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window_2.q.out
index aee7e8b..6cc539f 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_create_rewrite_time_window_2.q.out
@@ -94,7 +94,7 @@ Table Parameters:
numFiles 2
numRows 2
rawDataSize 232
- totalSize 606
+ totalSize 610
#### A masked pattern was here ####
# Storage Information
@@ -153,7 +153,7 @@ Table Parameters:
numFiles 2
numRows 2
rawDataSize 232
- totalSize 606
+ totalSize 610
#### A masked pattern was here ####
# Storage Information
@@ -212,7 +212,7 @@ Table Parameters:
numFiles 2
numRows 3
rawDataSize 348
- totalSize 636
+ totalSize 640
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out
index 3beb09c..9e459b0 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_describe.q.out
@@ -73,7 +73,7 @@ Table Parameters:
numFiles 1
numRows 5
rawDataSize 580
- totalSize 354
+ totalSize 356
#### A masked pattern was here ####
# Storage Information
@@ -102,7 +102,7 @@ numFiles 1
numFilesErasureCoded 0
numRows 5
rawDataSize 580
-totalSize 354
+totalSize 356
#### A masked pattern was here ####
PREHOOK: query: select a, c from cmv_mat_view_n8
PREHOOK: type: QUERY
@@ -245,7 +245,7 @@ Table Parameters:
numFiles 1
numRows 5
rawDataSize 1025
- totalSize 509
+ totalSize 511
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out
index 97c67e0..9f7e3b4 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_drop.q.out
@@ -20,9 +20,9 @@ columns:struct columns { i32 cint, string cstring1}
partitioned:false
partitionColumns:
totalNumberFiles:1
-totalFileSize:47137
-maxFileSize:47137
-minFileSize:47137
+totalFileSize:47139
+maxFileSize:47139
+minFileSize:47139
#### A masked pattern was here ####
PREHOOK: query: drop materialized view dmv_mat_view
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out
index 7e3bb01..25c5aed 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_partition_cluster.q.out
@@ -297,7 +297,7 @@ Table Parameters:
numPartitions 32
numRows 55
rawDataSize 9790
- totalSize 10806
+ totalSize 10870
#### A masked pattern was here ####
# Storage Information
@@ -1805,7 +1805,7 @@ Table Parameters:
numPartitions 32
numRows 57
rawDataSize 11062
- totalSize 17102
+ totalSize 17166
#### A masked pattern was here ####
# Storage Information
@@ -2598,7 +2598,7 @@ Table Parameters:
numPartitions 32
numRows 58
rawDataSize 11257
- totalSize 17105
+ totalSize 17169
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
index 49099f0..85e22c7 100644
--- a/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/llap/materialized_view_partitioned.q.out
@@ -260,7 +260,7 @@ Table Parameters:
numPartitions 32
numRows 55
rawDataSize 5005
- totalSize 7618
+ totalSize 7682
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
index 04ab995..804f7ed 100644
--- a/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_analyze.q.out
@@ -102,7 +102,7 @@ Table Parameters:
numFiles 1
numRows 100
rawDataSize 52600
- totalSize 3227
+ totalSize 3229
#### A masked pattern was here ####
# Storage Information
@@ -150,7 +150,7 @@ Table Parameters:
numFiles 1
numRows 100
rawDataSize 52600
- totalSize 3227
+ totalSize 3229
#### A masked pattern was here ####
# Storage Information
@@ -237,7 +237,7 @@ Table Parameters:
numFiles 1
numRows 100
rawDataSize 52600
- totalSize 3227
+ totalSize 3229
#### A masked pattern was here ####
# Storage Information
@@ -345,7 +345,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 21950
- totalSize 2129
+ totalSize 2131
#### A masked pattern was here ####
# Storage Information
@@ -386,7 +386,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 22050
- totalSize 2142
+ totalSize 2144
#### A masked pattern was here ####
# Storage Information
@@ -439,7 +439,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 21950
- totalSize 2129
+ totalSize 2131
#### A masked pattern was here ####
# Storage Information
@@ -480,7 +480,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 22050
- totalSize 2142
+ totalSize 2144
#### A masked pattern was here ####
# Storage Information
@@ -576,7 +576,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 21950
- totalSize 2129
+ totalSize 2131
#### A masked pattern was here ####
# Storage Information
@@ -617,7 +617,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 22050
- totalSize 2142
+ totalSize 2144
#### A masked pattern was here ####
# Storage Information
@@ -731,7 +731,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 21955
- totalSize 5382
+ totalSize 5390
#### A masked pattern was here ####
# Storage Information
@@ -772,7 +772,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 22043
- totalSize 5371
+ totalSize 5376
#### A masked pattern was here ####
# Storage Information
@@ -825,7 +825,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 21955
- totalSize 5382
+ totalSize 5390
#### A masked pattern was here ####
# Storage Information
@@ -866,7 +866,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 22043
- totalSize 5371
+ totalSize 5376
#### A masked pattern was here ####
# Storage Information
@@ -968,7 +968,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 21955
- totalSize 5382
+ totalSize 5390
#### A masked pattern was here ####
# Storage Information
@@ -1009,7 +1009,7 @@ Partition Parameters:
numFiles 4
numRows 50
rawDataSize 22043
- totalSize 5371
+ totalSize 5376
#### A masked pattern was here ####
# Storage Information
@@ -1117,7 +1117,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 21950
- totalSize 2129
+ totalSize 2131
#### A masked pattern was here ####
# Storage Information
@@ -1170,7 +1170,7 @@ Partition Parameters:
numFiles 1
numRows 50
rawDataSize 21950
- totalSize 2129
+ totalSize 2131
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_date.q.out
new file mode 100644
index 0000000..dac30cc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_date.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table hybrid_table (d date)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d date)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_timestamp.q.out
new file mode 100644
index 0000000..e0a0231
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_hybrid_mixed_timestamp.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table hybrid_table (d timestamp)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d timestamp)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_date.q.out
new file mode 100644
index 0000000..ec4c219
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_date.q.out
@@ -0,0 +1,58 @@
+PREHOOK: query: create table legacy_table (d date)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (d date)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/orc_legacy_mixed_dates.orc' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/orc_legacy_mixed_dates.orc' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-27
+1014-02-17
+0947-02-16
+0200-02-10
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_timestamp.q.out
new file mode 100644
index 0000000..5f9aa55
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_legacy_mixed_timestamp.q.out
@@ -0,0 +1,58 @@
+PREHOOK: query: create table legacy_table (ts timestamp)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (ts timestamp)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/orc_legacy_mixed_timestamps.orc' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/orc_legacy_mixed_timestamps.orc' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:08:09.123
+1014-02-11 07:08:09.123
+0947-02-11 07:08:09.123
+0200-02-11 07:08:09.123
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-27 07:08:09.123
+1014-02-17 07:08:09.123
+0947-02-16 07:08:09.123
+0200-02-10 07:08:09.123
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
index 2b3d766..a6aca8f 100644
--- a/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters.q.out
@@ -237,7 +237,7 @@ Table Parameters:
orc.bloom.filter.columns *
orc.row.index.stride 1000
rawDataSize 1139514
- totalSize 55694
+ totalSize 55696
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out
index 3922c46..53e18a5 100644
--- a/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_llap_counters1.q.out
@@ -237,7 +237,7 @@ Table Parameters:
orc.bloom.filter.columns *
orc.row.index.stride 1000
rawDataSize 1139514
- totalSize 55694
+ totalSize 55696
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out b/ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out
index d656c3c..d76f5d8 100644
--- a/ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_llap_nonvector.q.out
@@ -1348,7 +1348,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: orc_llap_nonvector_2
- Statistics: Num rows: 12288 Data size: 4468050 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 12288 Data size: 4468070 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: ROW__ID (type: struct<writeid:bigint,bucketid:int,rowid:bigint>)
outputColumnNames: _col0
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
index 44a5d6f..3748086 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge1.q.out
@@ -167,12 +167,12 @@ POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)s
POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 6 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 555 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 562 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 561 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 496 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 554 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 478 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 557 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 564 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 563 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 498 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 556 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 480 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1b_n1 PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -350,7 +350,7 @@ POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)
POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 1360 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 1362 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1c_n1 PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -520,7 +520,7 @@ POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)
POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 2461 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 2463 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge1_n1 WHERE ds='1'
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
index 3bf3710..82fcd88 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge10.q.out
@@ -182,7 +182,7 @@ POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=0).value SIMPLE [(src)src.
POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 1754 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 1756 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1b PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -375,7 +375,7 @@ POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=0).value SIMPLE [(src)src
POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1b PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 1754 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 1756 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1c PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -560,7 +560,7 @@ POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=0).value SIMPLE [(src)src
POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1c PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 1754 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 1756 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge1 WHERE ds='1'
@@ -672,7 +672,7 @@ POSTHOOK: type: ALTER_PARTITION_MERGE
POSTHOOK: Input: default@orcfile_merge1
POSTHOOK: Output: default@orcfile_merge1@ds=1/part=0
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 1754 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 1756 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge1c WHERE ds='1'
@@ -748,6 +748,7 @@ File Version: 0.12 with ORC_517
Rows: 242
Compression: SNAPPY
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<key:int,value:string>
Stripe Statistics:
@@ -780,7 +781,7 @@ Stripes:
Row group indices for column 2:
Entry 0: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 positions: 0,0,0
-File length: 1754 bytes
+File length: 1756 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -798,6 +799,7 @@ File Version: 0.12 with ORC_517
Rows: 242
Compression: SNAPPY
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<key:int,value:string>
Stripe Statistics:
@@ -830,7 +832,7 @@ Stripes:
Row group indices for column 2:
Entry 0: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 positions: 0,0,0
-File length: 1754 bytes
+File length: 1756 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
index 6a8aae4..c947e8d 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge11.q.out
@@ -76,6 +76,7 @@ File Version: 0.12 with ORC_517
Rows: 50000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -155,7 +156,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 6672 bytes
+File length: 6674 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -167,6 +168,7 @@ File Version: 0.12 with ORC_517
Rows: 50000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -246,7 +248,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 6672 bytes
+File length: 6674 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -279,6 +281,7 @@ File Version: 0.12 with ORC_517
Rows: 100000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -423,7 +426,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 12978 bytes
+File length: 12980 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge2.q.out b/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
index 19ca90c..4843ad1 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge2.q.out
@@ -213,7 +213,7 @@ POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=1).value SIMPL
POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 349 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 351 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge2a_n0
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge3.q.out b/ql/src/test/results/clientpositive/llap/orc_merge3.q.out
index 7b04c74..43704b2 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge3.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge3.q.out
@@ -164,7 +164,7 @@ POSTHOOK: Output: default@orcfile_merge3b_n0
POSTHOOK: Lineage: orcfile_merge3b_n0.key SIMPLE [(orcfile_merge3a_n0)orcfile_merge3a_n0.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: orcfile_merge3b_n0.value SIMPLE [(orcfile_merge3a_n0)orcfile_merge3a_n0.FieldSchema(name:value, type:string, comment:null), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 2572 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 2574 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
FROM orcfile_merge3a_n0
diff --git a/ql/src/test/results/clientpositive/llap/orc_merge4.q.out b/ql/src/test/results/clientpositive/llap/orc_merge4.q.out
index 8e18bc2..b62276e 100644
--- a/ql/src/test/results/clientpositive/llap/orc_merge4.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_merge4.q.out
@@ -37,7 +37,7 @@ POSTHOOK: Output: default@orcfile_merge3a@ds=1
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: INSERT OVERWRITE TABLE orcfile_merge3a PARTITION (ds='1')
SELECT * FROM src
PREHOOK: type: QUERY
@@ -63,9 +63,9 @@ POSTHOOK: Output: default@orcfile_merge3a@ds=2
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge3b
SELECT key, value FROM orcfile_merge3a
PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out
index f65280e..1aa1203 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_basic.q.out
@@ -1916,7 +1916,7 @@ PREHOOK: Output: database:default
PREHOOK: Output: default@orc_ppd_1
Stage-1 FILE SYSTEM COUNTERS:
HDFS_BYTES_READ: 10583
- HDFS_BYTES_WRITTEN: 1467
+ HDFS_BYTES_WRITTEN: 1468
HDFS_READ_OPS: 5
HDFS_LARGE_READ_OPS: 0
HDFS_WRITE_OPS: 3
@@ -1952,7 +1952,7 @@ PREHOOK: type: QUERY
PREHOOK: Input: default@orc_ppd_1
PREHOOK: Output: hdfs://### HDFS PATH ###
Stage-1 FILE SYSTEM COUNTERS:
- HDFS_BYTES_READ: 1591
+ HDFS_BYTES_READ: 1592
HDFS_BYTES_WRITTEN: 101
HDFS_READ_OPS: 6
HDFS_LARGE_READ_OPS: 0
diff --git a/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out b/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
index 9dcd4ec..b1679de 100644
--- a/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
+++ b/ql/src/test/results/clientpositive/llap/orc_ppd_schema_evol_3a.q.out
@@ -1378,7 +1378,7 @@ PREHOOK: query: alter table orc_ppd_n3 change column f f double
PREHOOK: type: ALTERTABLE_RENAMECOL
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: default@orc_ppd_n3
-PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72
+PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72000122070312
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: hdfs://### HDFS PATH ###
@@ -1411,7 +1411,7 @@ Stage-1 INPUT COUNTERS:
INPUT_FILES_Map_1: 1
RAW_INPUT_SPLITS_Map_1: 1
2
-PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72
+PREHOOK: query: select count(*) from orc_ppd_n3 where f = 74.72000122070312
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: hdfs://### HDFS PATH ###
@@ -1448,7 +1448,7 @@ PREHOOK: query: alter table orc_ppd_n3 change column f f string
PREHOOK: type: ALTERTABLE_RENAMECOL
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: default@orc_ppd_n3
-PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72'
+PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72000122070312'
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: hdfs://### HDFS PATH ###
@@ -1481,7 +1481,7 @@ Stage-1 INPUT COUNTERS:
INPUT_FILES_Map_1: 1
RAW_INPUT_SPLITS_Map_1: 1
2
-PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72'
+PREHOOK: query: select count(*) from orc_ppd_n3 where f = '74.72000122070312'
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_ppd_n3
PREHOOK: Output: hdfs://### HDFS PATH ###
diff --git a/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_date.q.out
new file mode 100644
index 0000000..dac30cc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_date.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table hybrid_table (d date)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d date)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_timestamp.q.out
new file mode 100644
index 0000000..e0a0231
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/orc_proleptic_mixed_timestamp.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table hybrid_table (d timestamp)
+stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d timestamp)
+stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_date.q.out
new file mode 100644
index 0000000..2a834e2
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_date.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table hybrid_table (d date)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d date)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_timestamp.q.out
new file mode 100644
index 0000000..51c6e9a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/parquet_hybrid_mixed_timestamp.q.out
@@ -0,0 +1,59 @@
+PREHOOK: query: create table hybrid_table (d timestamp)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: create table hybrid_table (d timestamp)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@hybrid_table
+PREHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: INSERT INTO hybrid_table VALUES
+('2012-02-21 07:08:09.123'),
+('2014-02-11 07:08:09.123'),
+('1947-02-11 07:08:09.123'),
+('8200-02-11 07:08:09.123'),
+('1012-02-21 07:15:11.123'),
+('1014-02-11 07:15:11.123'),
+('0947-02-11 07:15:11.123'),
+('0200-02-11 07:15:11.123')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@hybrid_table
+POSTHOOK: Lineage: hybrid_table.d SCRIPT []
+PREHOOK: query: select * from hybrid_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from hybrid_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@hybrid_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table hybrid_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@hybrid_table
+PREHOOK: Output: default@hybrid_table
+POSTHOOK: query: drop table hybrid_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@hybrid_table
+POSTHOOK: Output: default@hybrid_table
diff --git a/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_date.q.out
new file mode 100644
index 0000000..b6a0d70
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_date.q.out
@@ -0,0 +1,58 @@
+PREHOOK: query: create table legacy_table (d date)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (d date)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/parquet_legacy_mixed_dates.parq' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/parquet_legacy_mixed_dates.parq' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-27
+1014-02-17
+0947-02-16
+0200-02-10
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_timestamp.q.out b/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_timestamp.q.out
new file mode 100644
index 0000000..1259318
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/parquet_legacy_mixed_timestamp.q.out
@@ -0,0 +1,42 @@
+PREHOOK: query: create table legacy_table (d timestamp)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: create table legacy_table (d timestamp)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: load data local inpath '../../data/files/parquet_legacy_mixed_timestamps.parq' into table legacy_table
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: load data local inpath '../../data/files/parquet_legacy_mixed_timestamps.parq' into table legacy_table
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@legacy_table
+PREHOOK: query: select * from legacy_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from legacy_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@legacy_table
+#### A masked pattern was here ####
+2012-02-21 07:08:09.123
+2014-02-11 07:08:09.123
+1947-02-11 07:08:09.123
+8200-02-11 07:08:09.123
+1012-02-21 07:15:11.123
+1014-02-11 07:15:11.123
+0947-02-11 07:15:11.123
+0200-02-11 07:15:11.123
+PREHOOK: query: drop table legacy_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@legacy_table
+PREHOOK: Output: default@legacy_table
+POSTHOOK: query: drop table legacy_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@legacy_table
+POSTHOOK: Output: default@legacy_table
diff --git a/ql/src/test/results/clientpositive/llap/parquet_proleptic_mixed_date.q.out b/ql/src/test/results/clientpositive/llap/parquet_proleptic_mixed_date.q.out
new file mode 100644
index 0000000..d0f61ea
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/parquet_proleptic_mixed_date.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: create table proleptic_table (d date)
+stored as parquet
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@proleptic_table
+POSTHOOK: query: create table proleptic_table (d date)
+stored as parquet
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@proleptic_table
+PREHOOK: query: INSERT INTO proleptic_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@proleptic_table
+POSTHOOK: query: INSERT INTO proleptic_table VALUES
+('2012-02-21'),
+('2014-02-11'),
+('1947-02-11'),
+('8200-02-11'),
+('1012-02-21'),
+('1014-02-11'),
+('0947-02-11'),
+('0200-02-11')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@proleptic_table
+POSTHOOK: Lineage: proleptic_table.d SCRIPT []
+PREHOOK: query: select * from proleptic_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@proleptic_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from proleptic_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@proleptic_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: select * from proleptic_table
+PREHOOK: type: QUERY
+PREHOOK: Input: default@proleptic_table
+#### A masked pattern was here ####
+POSTHOOK: query: select * from proleptic_table
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@proleptic_table
+#### A masked pattern was here ####
+2012-02-21
+2014-02-11
+1947-02-11
+8200-02-11
+1012-02-21
+1014-02-11
+0947-02-11
+0200-02-11
+PREHOOK: query: drop table proleptic_table
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@proleptic_table
+PREHOOK: Output: default@proleptic_table
+POSTHOOK: query: drop table proleptic_table
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@proleptic_table
+POSTHOOK: Output: default@proleptic_table
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
index 6c1c192..e375b63 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part.q.out
@@ -399,11 +399,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n9
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n9@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n9
PREHOOK: type: DROPTABLE
@@ -694,7 +694,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n9
@@ -1073,7 +1073,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n9
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
index ec3a9f8..29b1775 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_part_llap_io.q.out
@@ -399,11 +399,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n5
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n5@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n5
PREHOOK: type: DROPTABLE
@@ -694,7 +694,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n5
@@ -1073,7 +1073,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n5
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
index 981f551..bae4082 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table.q.out
@@ -414,11 +414,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n6
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n6
PREHOOK: type: DROPTABLE
@@ -701,7 +701,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n6
@@ -1068,7 +1068,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n6
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
index 808e308..323cc5c 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acid_table_llap_io.q.out
@@ -414,11 +414,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n0
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n0
PREHOOK: type: DROPTABLE
@@ -701,7 +701,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n0
@@ -1068,7 +1068,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n0
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
index 5823106..8872e0b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_part_llap_io.q.out
@@ -618,11 +618,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n6
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n6@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n6
PREHOOK: type: DROPTABLE
@@ -1149,7 +1149,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n6
@@ -1882,7 +1882,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n6
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
index f6c05a4..06630a4 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table.q.out
@@ -619,11 +619,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group
PREHOOK: type: DROPTABLE
@@ -1126,7 +1126,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group
@@ -1823,7 +1823,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
index d5a76b2..8177275 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_acidvec_table_llap_io.q.out
@@ -623,11 +623,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n12
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n12
PREHOOK: type: DROPTABLE
@@ -1134,7 +1134,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n12
@@ -1837,7 +1837,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n12
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
index 13b1886..681d8c2 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part.q.out
@@ -582,11 +582,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n7
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n7@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n7
PREHOOK: type: DROPTABLE
@@ -977,7 +977,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n7
@@ -1506,7 +1506,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n7
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
index 178df33..09cc541 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex.q.out
@@ -196,10 +196,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n2@part=1
POSTHOOK: Input: default@part_change_various_various_struct1_n2@part=2
#### A masked pattern was here ####
insert_num part s1 b
-1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
-2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"} original
-3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
-4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
+1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
+2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original
+3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
+4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new
6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new
PREHOOK: query: drop table part_change_various_various_struct1_n2
@@ -484,10 +484,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n2@part=2
insert_num part b s2
1 1 original NULL
2 1 original NULL
-3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
PREHOOK: query: drop table part_add_various_various_struct2_n2
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
index ff6808d..9bd8e14 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_complex_llap_io.q.out
@@ -197,10 +197,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n5@part=1
POSTHOOK: Input: default@part_change_various_various_struct1_n5@part=2
#### A masked pattern was here ####
insert_num part s1 b
-1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
-2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"} original
-3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
-4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
+1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
+2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original
+3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
+4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new
6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new
PREHOOK: query: drop table part_change_various_various_struct1_n5
@@ -486,10 +486,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n5@part=2
insert_num part b s2
1 1 original NULL
2 1 original NULL
-3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
PREHOOK: query: drop table part_add_various_various_struct2_n5
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
index 81125cb..8021650 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive.q.out
@@ -534,11 +534,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n6
POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n6@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b
-101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
+101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970117179000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117919850.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117921E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity [...]
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70887654E8 original
-105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
+104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359779357910160000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35977935791016 30.774 30.774 30.774 2.70887654E8 [...]
+105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368540000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new
PREHOOK: query: drop table part_change_various_various_decimal_to_double_n6
PREHOOK: type: DROPTABLE
@@ -687,11 +687,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n6
POSTHOOK: Input: default@part_change_various_various_timestamp_n6@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b
-101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 09:54:28.970117179 6229-06-28 09:54:28.97011 6229-06-28 09:54:28.97011 1950-12-18 08:00:00 original
-102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL 1970-01-01 00:00:00 NULL 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 2049-12-18 08:00:00 original
+101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original
+102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL NULL NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.640220643 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 13:34:14 1978-08-02 13:34:14 1978-08-02 13:34:14 2021-09-24 07:00:00 original
-105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-07 00:20:39.72036854 1991-01-07 00:20:39.72036 1991-01-07 00:20:39.72036 2024-11-11 08:00:00 original
+104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.64 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original
+105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original
111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new
PREHOOK: query: drop table part_change_various_various_timestamp_n6
PREHOOK: type: DROPTABLE
@@ -824,10 +824,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n6
POSTHOOK: Input: default@part_change_various_various_date_n6@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 b
-101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-27 original
-102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-08 original
+101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original
+102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original
103 1 NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-01 original
+104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original
105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original
111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new
PREHOOK: query: drop table part_change_various_various_date_n6
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
index e4c29f7..4fca524 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_all_primitive_llap_io.q.out
@@ -536,11 +536,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n5
POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n5@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b
-101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
+101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970117179000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117919850.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117921E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity [...]
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70887654E8 original
-105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
+104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359779357910160000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35977935791016 30.774 30.774 30.774 2.70887654E8 [...]
+105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368540000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new
PREHOOK: query: drop table part_change_various_various_decimal_to_double_n5
PREHOOK: type: DROPTABLE
@@ -690,11 +690,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n5
POSTHOOK: Input: default@part_change_various_various_timestamp_n5@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b
-101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 09:54:28.970117179 6229-06-28 09:54:28.97011 6229-06-28 09:54:28.97011 1950-12-18 08:00:00 original
-102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL 1970-01-01 00:00:00 NULL 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 2049-12-18 08:00:00 original
+101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original
+102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL NULL NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.640220643 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 13:34:14 1978-08-02 13:34:14 1978-08-02 13:34:14 2021-09-24 07:00:00 original
-105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-07 00:20:39.72036854 1991-01-07 00:20:39.72036 1991-01-07 00:20:39.72036 2024-11-11 08:00:00 original
+104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.64 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original
+105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original
111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new
PREHOOK: query: drop table part_change_various_various_timestamp_n5
PREHOOK: type: DROPTABLE
@@ -828,10 +828,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n5
POSTHOOK: Input: default@part_change_various_various_date_n5@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 b
-101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-27 original
-102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-08 original
+101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original
+102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original
103 1 NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-01 original
+104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original
105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original
111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new
PREHOOK: query: drop table part_change_various_various_date_n5
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
index 41b2bc2..1620489 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_part_llap_io.q.out
@@ -586,11 +586,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n2
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n2@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n2
PREHOOK: type: DROPTABLE
@@ -983,7 +983,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n2
@@ -1515,7 +1515,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n2
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
index f183a0f..3bc914a 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out
@@ -543,11 +543,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n10
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n10
PREHOOK: type: DROPTABLE
@@ -926,7 +926,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n10
@@ -1293,7 +1293,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n10
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
index 3de1636..2eaf175 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table_llap_io.q.out
@@ -546,11 +546,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n7
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n7
PREHOOK: type: DROPTABLE
@@ -931,7 +931,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n7
@@ -1298,7 +1298,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n7
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
index f3f567d..4d2454f 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part.q.out
@@ -686,11 +686,11 @@ POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n4
POSTHOOK: Input: default@part_change_date_group_string_group_date_timestamp_n4@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 1 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 1 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 1 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 1 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_date_group_string_group_date_timestamp_n4
PREHOOK: type: DROPTABLE
@@ -1133,7 +1133,7 @@ insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 1 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 1 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 1 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 1 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 1 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table part_change_numeric_group_string_group_floating_string_group_n4
@@ -1740,7 +1740,7 @@ insert_num part c1 c2 c3 b
101 1 1.0E20 1.0E20 Infinity original
102 1 -1.0E20 -1.0E20 -Infinity original
103 1 NULL NULL NULL original
-104 1 66475.56 66475.561431 -100.35978 original
+104 1 66475.56 66475.561431 -100.35977935791016 original
105 1 9250341.0 9250340.75 NULL original
111 1 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table part_change_lower_to_higher_numeric_group_decimal_to_float_n4
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
index ef125ad..348d25b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex.q.out
@@ -222,10 +222,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n8@part=1
POSTHOOK: Input: default@part_change_various_various_struct1_n8@part=2
#### A masked pattern was here ####
insert_num part s1 b
-1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
-2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"} original
-3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
-4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
+1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
+2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original
+3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
+4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new
6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new
PREHOOK: query: drop table part_change_various_various_struct1_n8
@@ -536,10 +536,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n8@part=2
insert_num part b s2
1 1 original NULL
2 1 original NULL
-3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
PREHOOK: query: drop table part_add_various_various_struct2_n8
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
index a32c519..5aea9c6 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_complex_llap_io.q.out
@@ -223,10 +223,10 @@ POSTHOOK: Input: default@part_change_various_various_struct1_n0@part=1
POSTHOOK: Input: default@part_change_various_various_struct1_n0@part=2
#### A masked pattern was here ####
insert_num part s1 b
-1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
-2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"} original
-3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
-4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
+1 1 {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"} original
+2 1 {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"} original
+3 1 {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"} original
+4 1 {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"} original
5 2 {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"} new
6 1 {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"} new
PREHOOK: query: drop table part_change_various_various_struct1_n0
@@ -538,10 +538,10 @@ POSTHOOK: Input: default@part_add_various_various_struct2_n0@part=2
insert_num part b s2
1 1 original NULL
2 1 original NULL
-3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.0764","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-24 10:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
-4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-08 21:17:29.368756876","c12":"0004-09-24","c13":"6e 29 da af"}
-5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.56","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-27 19:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
-6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-09 22:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
+3 1 new {"c1":"TRUE","c2":null,"c3":null,"c4":"3244222","c5":"-99999999999","c6":"-29.076400756835938","c7":"4.70614135E8","c8":"470614135","c9":"dynamic reptile","c10":"dynamic reptile ","c11":"0004-09-22 18:26:29.519542222","c12":"2007-02-09","c13":"6e 29 da af"}
+4 1 new {"c1":null,"c2":"100","c3":null,"c4":"14","c5":"-23866739993","c6":"-3651.672119140625","c7":"46114.284799488","c8":"46114.284799488","c9":" baffling","c10":" baffling ","c11":"2007-02-09 05:17:29.368756876","c12":"0004-09-22","c13":"6e 29 da af"}
+5 2 new {"c1":"FALSE","c2":"72","c3":null,"c4":"-93222","c5":"30","c6":"-66475.5625","c7":"-66475.561431","c8":"0.561431","c9":"1","c10":"1","c11":"6229-06-28 02:54:28.970117179","c12":"5966-07-09","c13":"6e 29 da af"}
+6 2 new {"c1":null,"c2":"-90","c3":null,"c4":"3289094","c5":"46114","c6":"9250341.0","c7":"9250340.75","c8":"9250340.75","c9":"junkyard","c10":"junkyard","c11":"2002-05-10 05:29:48.990818073","c12":"1815-05-06","c13":"6e 29 da af"}
7 2 new {"c1":"true","c2":"400","c3":"44388","c4":"-100","c5":"953967041.","c6":"62.079153","c7":"718.78","c8":"1","c9":"verdict","c10":"verdict","c11":"timestamp","c12":"date","c13":"binary"}
8 1 new {"c1":"false","c2":"-67","c3":"833","c4":"63993","c5":"1255178165.77663","c6":"905070.974","c7":"-4314.7918","c8":"-1240033819","c9":"trial","c10":"trial","c11":"2016-03-0703:02:22.0","c12":"2016-03-07","c13":"binary"}
PREHOOK: query: drop table part_add_various_various_struct2_n0
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
index b334b2d..3d6950d 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive.q.out
@@ -586,11 +586,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n0
POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n0@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b
-101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
+101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970117179000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117919850.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117921E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity [...]
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70887654E8 original
-105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
+104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359779357910160000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35977935791016 30.774 30.774 30.774 2.70887654E8 [...]
+105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368540000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new
PREHOOK: query: drop table part_change_various_various_decimal_to_double_n0
PREHOOK: type: DROPTABLE
@@ -765,11 +765,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n0
POSTHOOK: Input: default@part_change_various_various_timestamp_n0@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b
-101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 09:54:28.970117179 6229-06-28 09:54:28.97011 6229-06-28 09:54:28.97011 1950-12-18 08:00:00 original
-102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL 1970-01-01 00:00:00 NULL 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 2049-12-18 08:00:00 original
+101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original
+102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL NULL NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.640220643 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 13:34:14 1978-08-02 13:34:14 1978-08-02 13:34:14 2021-09-24 07:00:00 original
-105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-07 00:20:39.72036854 1991-01-07 00:20:39.72036 1991-01-07 00:20:39.72036 2024-11-11 08:00:00 original
+104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.64 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original
+105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original
111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new
PREHOOK: query: drop table part_change_various_various_timestamp_n0
PREHOOK: type: DROPTABLE
@@ -928,10 +928,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n0
POSTHOOK: Input: default@part_change_various_various_date_n0@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 b
-101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-27 original
-102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-08 original
+101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original
+102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original
103 1 NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-01 original
+104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original
105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original
111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new
PREHOOK: query: drop table part_change_various_various_date_n0
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
index dc401b8..1c9da6c 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_part_all_primitive_llap_io.q.out
@@ -588,11 +588,11 @@ POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n4
POSTHOOK: Input: default@part_change_various_various_decimal_to_double_n4@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 c16 c17 c18 c19 c20 c21 c22 c23 c24 c25 c26 c27 c28 c29 c30 c31 c32 c33 b
-101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970120000000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
+101 1 1.000000000000000000 -128.000000000000000000 NULL -2147483648.000000000000000000 NULL NULL NULL 99999999999999999999.999999999999999999 99999999999999999999.999900000000000000 99999999999999999999.999900000000000000 134416464868.970117179000000000 1.0 -128.0 NULL -2.14748365E9 NULL 1.0E20 Infinity Infinity 3.4028236E24 3.4028236E24 1.34416466E11 1.0 -128.0 NULL -2.147483648E9 NULL 1.0E20 Infinity 1.7976931348623157E308 1.7976931348623157E308 1.7976931348623157E308 1.344164648689701 [...]
102 1 0.000000000000000000 127.000000000000000000 32767.000000000000000000 2147483647.000000000000000000 9223372036854775807.000000000000000000 NULL NULL -99999999999999999999.999999999999999999 -99999999999999999999.999000000000000000 -99999999999999999999.999000000000000000 126117919850.597000000000000000 0.0 127.0 32767.0 2.14748365E9 9.223372E18 -1.0E20 -Infinity -Infinity -3.4028233E23 -3.4028233E23 1.26117921E11 0.0 127.0 32767.0 2.147483647E9 9.223372036854776E18 -1.0E20 -Infinity [...]
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359780000000000000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35978 30.774 30.774 30.774 2.70887654E8 original
-105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368500000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
+104 1 1.000000000000000000 23.000000000000000000 834.000000000000000000 203332.000000000000000000 888888857923222.000000000000000000 -100.359779357910160000 30.774000000000000000 66475.561431000000000000 66475.561431000000000000 66475.561431000000000000 270887654.000000000000000000 1.0 23.0 834.0 203332.0 8.8888885E14 66475.56 30.774 -100.35978 -100.35978 -100.35978 2.70887648E8 1.0 23.0 834.0 203332.0 8.88888857923222E14 66475.561431 -100.35977935791016 30.774 30.774 30.774 2.70887654E8 [...]
+105 1 0.000000000000000000 -99.000000000000000000 -28300.000000000000000000 -999992.000000000000000000 -222282153733.000000000000000000 NULL 46114.280000000000000000 9250340.750000000000000000 9250340.750000000000000000 9250340.750000000000000000 663178839.720368540000000000 0.0 -99.0 -28300.0 -999992.0 -2.22282154E11 9250341.0 46114.28 NULL NULL NULL 6.6317882E8 0.0 -99.0 -28300.0 -999992.0 -2.22282153733E11 9250340.75 NULL 46114.28 46114.28 46114.28 6.631788397203685E8 original
111 1 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -46114.284799488000000000 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 -9.0E-8 new
PREHOOK: query: drop table part_change_various_various_decimal_to_double_n4
PREHOOK: type: DROPTABLE
@@ -768,11 +768,11 @@ POSTHOOK: Input: default@part_change_various_various_timestamp_n4
POSTHOOK: Input: default@part_change_various_various_timestamp_n4@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 b
-101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 09:54:28.970117179 6229-06-28 09:54:28.97011 6229-06-28 09:54:28.97011 1950-12-18 08:00:00 original
-102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL 1970-01-01 00:00:00 NULL 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 5966-07-09 10:30:50.597 2049-12-18 08:00:00 original
+101 1 1970-01-01 00:00:00.001 1969-12-31 23:59:59.872 NULL 1969-12-07 03:28:36.352 NULL NULL NULL NULL 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.97011 6229-06-28 02:54:28.97011 1950-12-18 00:00:00 original
+102 1 1970-01-01 00:00:00 1970-01-01 00:00:00.127 1970-01-01 00:00:32.767 1970-01-25 20:31:23.647 NULL NULL NULL NULL 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 2049-12-18 00:00:00 original
103 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.640220643 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 13:34:14 1978-08-02 13:34:14 1978-08-02 13:34:14 2021-09-24 07:00:00 original
-105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-07 00:20:39.72036854 1991-01-07 00:20:39.72036 1991-01-07 00:20:39.72036 2024-11-11 08:00:00 original
+104 1 1970-01-01 00:00:00.001 1970-01-01 00:00:00.023 1970-01-01 00:00:00.834 1970-01-01 00:03:23.332 NULL 1969-12-31 23:58:19.64 1970-01-01 00:00:30.774 1970-01-01 18:27:55.561431 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:34:14 2021-09-24 00:00:00 original
+105 1 1970-01-01 00:00:00 1969-12-31 23:59:59.901 1969-12-31 23:59:31.7 1969-12-31 23:43:20.008 1962-12-16 06:57:26.267 NULL 1970-01-01 12:48:34.28 1970-04-18 01:32:20.75 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036 1991-01-06 16:20:39.72036 2024-11-11 00:00:00 original
111 1 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL new
PREHOOK: query: drop table part_change_various_various_timestamp_n4
PREHOOK: type: DROPTABLE
@@ -932,10 +932,10 @@ POSTHOOK: Input: default@part_change_various_various_date_n4
POSTHOOK: Input: default@part_change_various_various_date_n4@part=1
#### A masked pattern was here ####
insert_num part c1 c2 c3 c4 b
-101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-27 original
-102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-08 original
+101 1 1950-12-18 1950-12-18 1950-12-18 6229-06-28 original
+102 1 2049-12-18 2049-12-18 2049-12-18 5966-07-09 original
103 1 NULL NULL NULL NULL original
-104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-01 original
+104 1 2021-09-24 2021-09-24 2021-09-24 1978-08-02 original
105 1 2024-11-11 2024-11-11 2024-11-11 1991-01-06 original
111 1 1964-01-24 1964-01-24 1964-01-24 1964-01-24 new
PREHOOK: query: drop table part_change_various_various_date_n4
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
index 1ef9dd9..d93de4c 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out
@@ -618,11 +618,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n3
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n3
PREHOOK: type: DROPTABLE
@@ -1051,7 +1051,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n3
@@ -1418,7 +1418,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n3
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
index 6f04a10..62ee02b 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table_llap_io.q.out
@@ -621,11 +621,11 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@table_change_date_group_string_group_date_group_n5
#### A masked pattern was here ####
insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 b
-101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-27 19:54:28.970117179 6229-06-27 19:54:28.970117179 6229-06-27 19:5 6229-06-27 19:54:28.970117179 6229-06-27 19:5 original
-102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-08 20:30:50.597 5966-07-08 20:30:50.597 5966-07-08 20:3 5966-07-08 20:30:50.597 5966-07-08 20:3 original
+101 1950-12-18 1950-12-18 1950-12-18 1950-12-18 1950-12-18 6229-06-28 02:54:28.970117179 6229-06-28 02:54:28.970117179 6229-06-28 02:5 6229-06-28 02:54:28.970117179 6229-06-28 02:5 original
+102 2049-12-18 2049-12-18 2049-12-18 2049-12-18 2049-12-18 5966-07-09 03:30:50.597 5966-07-09 03:30:50.597 5966-07-09 03:3 5966-07-09 03:30:50.597 5966-07-09 03:3 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-01 23:34:14.0 1978-08-01 23:34:14.0 1978-08-01 23:3 1978-08-01 23:34:14.0 1978-08-01 23:3 original
-105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 08:20:39.72036854 1991-01-06 08:20:39.72036854 1991-01-06 08:2 1991-01-06 08:20:39.72036854 1991-01-06 08:2 original
+104 2021-09-24 2021-09-24 2021-09-24 2021-09-24 2021-09-24 1978-08-02 06:34:14 1978-08-02 06:34:14 1978-08-02 06:3 1978-08-02 06:34:14 1978-08-02 06:3 original
+105 2024-11-11 2024-11-11 2024-11-11 2024-11-11 2024-11-11 1991-01-06 16:20:39.72036854 1991-01-06 16:20:39.72036854 1991-01-06 16:2 1991-01-06 16:20:39.72036854 1991-01-06 16:2 original
111 filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_date_group_string_group_date_group_n5
PREHOOK: type: DROPTABLE
@@ -1056,7 +1056,7 @@ insert_num c1 c2 c3 c4 c5 c6 c7 c8 c9 c10 c11 c12 c13 c14 c15 b
101 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 99999999999999999999.999999999999999999 Infinity 1.7976931348623157E308 9999999 Infinit 1.79769 original
102 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 -99999999999999999999.999999999999999999 -Infinity -1.7976931348623157E308 -999999 -Infini -1.7976 original
103 NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL NULL original
-104 66475.561431 -100.35978 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 66475.561431 -100.35978 30.774 66475.5 -100.35 30.774 original
+104 66475.561431 -100.35977935791016 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 66475.561431 -100.35977935791016 30.774 66475.5 -100.35 30.774 original
105 9250340.75 NULL 46114.28 9250340.75 NULL 46114.28 9250340 NULL 46114.2 9250340.75 NULL 46114.28 9250340 NULL 46114.2 original
111 filler filler filler filler filler filler filler filler filler filler filler filler filler filler filler new
PREHOOK: query: drop table table_change_numeric_group_string_group_floating_string_group_n5
@@ -1423,7 +1423,7 @@ insert_num c1 c2 c3 b
101 1.0E20 1.0E20 Infinity original
102 -1.0E20 -1.0E20 -Infinity original
103 NULL NULL NULL original
-104 66475.56 66475.561431 -100.35978 original
+104 66475.56 66475.561431 -100.35977935791016 original
105 9250341.0 9250340.75 NULL original
111 1234.5677 9876.543 1234.5678 new
PREHOOK: query: drop table table_change_lower_to_higher_numeric_group_decimal_to_float_n5
diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out
index 487089d..e3dca1d 100644
--- a/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out
+++ b/ql/src/test/results/clientpositive/llap/schema_evol_stats.q.out
@@ -294,7 +294,7 @@ Table Parameters:
numPartitions 2
numRows 8
rawDataSize 1116
- totalSize 848
+ totalSize 852
#### A masked pattern was here ####
# Storage Information
@@ -331,7 +331,7 @@ Partition Parameters:
numFiles 1
numRows 4
rawDataSize 384
- totalSize 323
+ totalSize 325
#### A masked pattern was here ####
# Storage Information
@@ -370,7 +370,7 @@ Partition Parameters:
numFiles 1
numRows 4
rawDataSize 732
- totalSize 525
+ totalSize 527
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/llap/sqlmerge_stats.q.out b/ql/src/test/results/clientpositive/llap/sqlmerge_stats.q.out
index 349d2cf..9e31c64 100644
--- a/ql/src/test/results/clientpositive/llap/sqlmerge_stats.q.out
+++ b/ql/src/test/results/clientpositive/llap/sqlmerge_stats.q.out
@@ -93,7 +93,7 @@ Table Parameters:
numFiles 1
numRows 1
rawDataSize 0
- totalSize 657
+ totalSize 659
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -426,7 +426,7 @@ Table Parameters:
numFiles 4
numRows 2
rawDataSize 0
- totalSize 2690
+ totalSize 2698
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -492,7 +492,7 @@ Table Parameters:
numFiles 6
numRows 0
rawDataSize 0
- totalSize 4052
+ totalSize 4063
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out b/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out
index cd7dd70..cfbbf82 100644
--- a/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_fixed_bucket_pruning.q.out
@@ -744,7 +744,7 @@ STAGE PLANS:
serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_desc [...]
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 5242697
+ totalSize 5242699
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -767,7 +767,7 @@ STAGE PLANS:
serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_de [...]
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 5242697
+ totalSize 5242699
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.l3_monthly_dw_dimplan
@@ -1259,7 +1259,7 @@ STAGE PLANS:
serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_desc [...]
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 5242697
+ totalSize 5242699
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -1282,7 +1282,7 @@ STAGE PLANS:
serialization.ddl struct l3_monthly_dw_dimplan { i64 idp_warehouse_id, i64 idp_audit_id, date idp_data_date, i64 l3_snapshot_number, i64 plan_key, i64 project_key, i64 charge_code_key, i64 transclass_key, i64 resource_key, i64 finplan_detail_object_id, i64 project_object_id, i64 txn_class_object_id, i64 charge_code_object_id, i64 resoruce_object_id, varchar(1500) plan_name, varchar(500) plan_code, varchar(50) plan_type, varchar(50) period_type, varchar(3000) plan_de [...]
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 5242697
+ totalSize 5242699
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.l3_monthly_dw_dimplan
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index 736e300..02c9d66 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -3906,7 +3906,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
Select Operator
@@ -3914,7 +3914,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: []
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: count()
Group By Vectorization:
diff --git a/ql/src/test/results/clientpositive/masking_mv.q.out b/ql/src/test/results/clientpositive/masking_mv.q.out
index b96b136..1de1a5a 100644
--- a/ql/src/test/results/clientpositive/masking_mv.q.out
+++ b/ql/src/test/results/clientpositive/masking_mv.q.out
@@ -175,7 +175,7 @@ Table Parameters:
numFiles 1
numRows 500
rawDataSize 2000
- totalSize 974
+ totalSize 976
#### A masked pattern was here ####
# Storage Information
@@ -855,7 +855,7 @@ Table Parameters:
numFiles 1
numRows 500
rawDataSize 2000
- totalSize 974
+ totalSize 976
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/materialized_view_create_acid.q.out b/ql/src/test/results/clientpositive/materialized_view_create_acid.q.out
index 52d9ae1..6b4d3ca 100644
--- a/ql/src/test/results/clientpositive/materialized_view_create_acid.q.out
+++ b/ql/src/test/results/clientpositive/materialized_view_create_acid.q.out
@@ -52,7 +52,7 @@ Table Parameters:
numFiles 1
numRows 5
rawDataSize 0
- totalSize 924
+ totalSize 926
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/orc_file_dump.q.out b/ql/src/test/results/clientpositive/orc_file_dump.q.out
index 2af84f8..c206eba 100644
--- a/ql/src/test/results/clientpositive/orc_file_dump.q.out
+++ b/ql/src/test/results/clientpositive/orc_file_dump.q.out
@@ -97,6 +97,7 @@ File Version: 0.12 with ORC_517
Rows: 1049
Compression: ZLIB
Compression size: 262144
+Calendar: Julian/Gregorian
Type: struct<t:tinyint,si:smallint,i:int,b:bigint,f:float,d:double,bo:boolean,s:string,ts:timestamp,dec:decimal(4,2),bin:binary>
Stripe Statistics:
@@ -269,7 +270,7 @@ Stripes:
Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8
Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8
-File length: 32312 bytes
+File length: 32313 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -294,6 +295,7 @@ File Version: 0.12 with ORC_517
Rows: 1049
Compression: ZLIB
Compression size: 262144
+Calendar: Julian/Gregorian
Type: struct<t:tinyint,si:smallint,i:int,b:bigint,f:float,d:double,bo:boolean,s:string,ts:timestamp,dec:decimal(4,2),bin:binary>
Stripe Statistics:
@@ -466,7 +468,7 @@ Stripes:
Entry 1: numHashFunctions: 7 bitCount: 9600 popCount: 174 loadFactor: 0.0181 expectedFpp: 6.426078E-13
Stripe level merge: numHashFunctions: 7 bitCount: 9600 popCount: 181 loadFactor: 0.0189 expectedFpp: 8.4693775E-13
-File length: 36956 bytes
+File length: 36958 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -503,6 +505,7 @@ File Version: 0.12 with ORC_517
Rows: 1049
Compression: ZLIB
Compression size: 262144
+Calendar: Julian/Gregorian
Type: struct<t:tinyint,si:smallint,i:int,b:bigint,f:float,d:double,bo:boolean,s:string,ts:timestamp,dec:decimal(4,2),bin:binary>
Stripe Statistics:
@@ -675,7 +678,7 @@ Stripes:
Entry 1: numHashFunctions: 4 bitCount: 6272 popCount: 98 loadFactor: 0.0156 expectedFpp: 5.9604645E-8
Stripe level merge: numHashFunctions: 4 bitCount: 6272 popCount: 102 loadFactor: 0.0163 expectedFpp: 6.9948186E-8
-File length: 32312 bytes
+File length: 32313 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/orc_merge10.q.out b/ql/src/test/results/clientpositive/orc_merge10.q.out
index 0b9b664..359f22f 100644
--- a/ql/src/test/results/clientpositive/orc_merge10.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge10.q.out
@@ -719,6 +719,7 @@ File Version: 0.12 with ORC_517
Rows: 242
Compression: SNAPPY
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<key:int,value:string>
Stripe Statistics:
@@ -751,7 +752,7 @@ Stripes:
Row group indices for column 2:
Entry 0: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 positions: 0,0,0
-File length: 1754 bytes
+File length: 1756 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -769,6 +770,7 @@ File Version: 0.12 with ORC_517
Rows: 242
Compression: SNAPPY
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<key:int,value:string>
Stripe Statistics:
@@ -801,7 +803,7 @@ Stripes:
Row group indices for column 2:
Entry 0: count: 242 hasNull: false min: val_0 max: val_97 sum: 1646 positions: 0,0,0
-File length: 1754 bytes
+File length: 1756 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/orc_merge11.q.out b/ql/src/test/results/clientpositive/orc_merge11.q.out
index 6a8aae4..c947e8d 100644
--- a/ql/src/test/results/clientpositive/orc_merge11.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge11.q.out
@@ -76,6 +76,7 @@ File Version: 0.12 with ORC_517
Rows: 50000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -155,7 +156,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 6672 bytes
+File length: 6674 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -167,6 +168,7 @@ File Version: 0.12 with ORC_517
Rows: 50000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -246,7 +248,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 6672 bytes
+File length: 6674 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
@@ -279,6 +281,7 @@ File Version: 0.12 with ORC_517
Rows: 100000
Compression: ZLIB
Compression size: 4096
+Calendar: Julian/Gregorian
Type: struct<userid:bigint,string1:string,subtype:double,decimal1:decimal(38,0),ts:timestamp>
Stripe Statistics:
@@ -423,7 +426,7 @@ Stripes:
Entry 3: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,506,294,0,232,304
Entry 4: count: 10000 hasNull: false min: 1969-12-31 16:04:10.0 max: 1969-12-31 16:04:10.0 positions: 0,666,54,0,312,64
-File length: 12978 bytes
+File length: 12980 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/orc_merge12.q.out b/ql/src/test/results/clientpositive/orc_merge12.q.out
index 7966084..166dc0b 100644
--- a/ql/src/test/results/clientpositive/orc_merge12.q.out
+++ b/ql/src/test/results/clientpositive/orc_merge12.q.out
@@ -148,6 +148,7 @@ File Version: 0.12 with ORC_517
Rows: 24576
Compression: ZLIB
Compression size: 131072
+Calendar: Julian/Gregorian
Type: struct<atinyint:tinyint,asmallint:smallint,aint:int,abigint:bigint,afloat:float,adouble:double,astring1:string,astring2:string,atimestamp1:timestamp,atimestamp2:timestamp,aboolean1:boolean,aboolean2:boolean,btinyint:tinyint,bsmallint:smallint,bint:int,bbigint:bigint,bfloat:float,bdouble:double,bstring1:string,bstring2:string,btimestamp1:timestamp,btimestamp2:timestamp,bboolean1:boolean,bboolean2:boolean,ctinyint:tinyint,csmallint:smallint,cint:int,cbigint:bigint,cfloat:float,cdoubl [...]
Stripe Statistics:
@@ -813,7 +814,7 @@ Stripes:
Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
-File length: 3004630 bytes
+File length: 3004632 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/orc_schema_evolution_float.q.out b/ql/src/test/results/clientpositive/orc_schema_evolution_float.q.out
index 19b5ca1..2176123 100644
--- a/ql/src/test/results/clientpositive/orc_schema_evolution_float.q.out
+++ b/ql/src/test/results/clientpositive/orc_schema_evolution_float.q.out
@@ -153,44 +153,44 @@ POSTHOOK: query: select f from float_orc
POSTHOOK: type: QUERY
POSTHOOK: Input: default@float_orc
#### A masked pattern was here ####
-0.22
-74.72
-PREHOOK: query: select f from float_orc where f=74.72
+0.2199999988079071
+74.72000122070312
+PREHOOK: query: select f from float_orc where f=74.72000122070312
PREHOOK: type: QUERY
PREHOOK: Input: default@float_orc
#### A masked pattern was here ####
-POSTHOOK: query: select f from float_orc where f=74.72
+POSTHOOK: query: select f from float_orc where f=74.72000122070312
POSTHOOK: type: QUERY
POSTHOOK: Input: default@float_orc
#### A masked pattern was here ####
-74.72
-PREHOOK: query: select f from float_orc where f=0.22
+74.72000122070312
+PREHOOK: query: select f from float_orc where f=0.2199999988079071
PREHOOK: type: QUERY
PREHOOK: Input: default@float_orc
#### A masked pattern was here ####
-POSTHOOK: query: select f from float_orc where f=0.22
+POSTHOOK: query: select f from float_orc where f=0.2199999988079071
POSTHOOK: type: QUERY
POSTHOOK: Input: default@float_orc
#### A masked pattern was here ####
-0.22
-PREHOOK: query: select f from float_orc where f=74.72
+0.2199999988079071
+PREHOOK: query: select f from float_orc where f=74.72000122070312
PREHOOK: type: QUERY
PREHOOK: Input: default@float_orc
#### A masked pattern was here ####
-POSTHOOK: query: select f from float_orc where f=74.72
+POSTHOOK: query: select f from float_orc where f=74.72000122070312
POSTHOOK: type: QUERY
POSTHOOK: Input: default@float_orc
#### A masked pattern was here ####
-74.72
-PREHOOK: query: select f from float_orc where f=0.22
+74.72000122070312
+PREHOOK: query: select f from float_orc where f=0.2199999988079071
PREHOOK: type: QUERY
PREHOOK: Input: default@float_orc
#### A masked pattern was here ####
-POSTHOOK: query: select f from float_orc where f=0.22
+POSTHOOK: query: select f from float_orc where f=0.2199999988079071
POSTHOOK: type: QUERY
POSTHOOK: Input: default@float_orc
#### A masked pattern was here ####
-0.22
+0.2199999988079071
PREHOOK: query: alter table float_orc change column f f decimal(14,5)
PREHOOK: type: ALTERTABLE_RENAMECOL
PREHOOK: Input: default@float_orc
diff --git a/ql/src/test/results/clientpositive/parquet_ppd_date.q.out b/ql/src/test/results/clientpositive/parquet_ppd_date.q.out
index d9f6846..a1966e2 100644
--- a/ql/src/test/results/clientpositive/parquet_ppd_date.q.out
+++ b/ql/src/test/results/clientpositive/parquet_ppd_date.q.out
@@ -431,3 +431,119 @@ POSTHOOK: query: select * from newtypestbl_n2 where da between '1970-02-18' and
POSTHOOK: type: QUERY
POSTHOOK: Input: default@newtypestbl_n2
#### A masked pattern was here ####
+PREHOOK: query: insert overwrite table newtypestbl_n2 select * from (select cast("apple" as char(10)), cast("bee" as varchar(10)), 0.22, cast("999-02-20" as date) from src src1 union all select cast("hello" as char(10)), cast("world" as varchar(10)), 11.22, cast("1820-02-27" as date) from src src2 limit 10) uniontbl
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@newtypestbl_n2
+POSTHOOK: query: insert overwrite table newtypestbl_n2 select * from (select cast("apple" as char(10)), cast("bee" as varchar(10)), 0.22, cast("999-02-20" as date) from src src1 union all select cast("hello" as char(10)), cast("world" as varchar(10)), 11.22, cast("1820-02-27" as date) from src src2 limit 10) uniontbl
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@newtypestbl_n2
+POSTHOOK: Lineage: newtypestbl_n2.c EXPRESSION []
+POSTHOOK: Lineage: newtypestbl_n2.d EXPRESSION []
+POSTHOOK: Lineage: newtypestbl_n2.da EXPRESSION []
+POSTHOOK: Lineage: newtypestbl_n2.v EXPRESSION []
+PREHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da='999-02-20'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+PREHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+POSTHOOK: query: select * from newtypestbl_n2 where da=cast('999-02-20' as date)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@newtypestbl_n2
+#### A masked pattern was here ####
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
+apple bee 0.220 0999-02-20
diff --git a/ql/src/test/results/clientpositive/parquet_stats.q.out b/ql/src/test/results/clientpositive/parquet_stats.q.out
index e821a75..8b5b02b 100644
--- a/ql/src/test/results/clientpositive/parquet_stats.q.out
+++ b/ql/src/test/results/clientpositive/parquet_stats.q.out
@@ -48,7 +48,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 98
- totalSize 556
+ totalSize 587
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/row__id.q.out b/ql/src/test/results/clientpositive/row__id.q.out
index a544621..a1123ac 100644
--- a/ql/src/test/results/clientpositive/row__id.q.out
+++ b/ql/src/test/results/clientpositive/row__id.q.out
@@ -72,25 +72,25 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: hello_acid
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ROW__ID.writeid (type: bigint)
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: bigint)
null sort order: z
sort order: +
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: bigint)
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -146,17 +146,17 @@ STAGE PLANS:
TableScan
alias: hello_acid
filterExpr: (ROW__ID.writeid = 3L) (type: boolean)
- Statistics: Num rows: 3 Data size: 19884 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 19944 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (ROW__ID.writeid = 3L) (type: boolean)
- Statistics: Num rows: 1 Data size: 6628 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 6648 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ROW__ID.writeid (type: bigint)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 6628 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 6648 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 6628 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 6648 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/spark/alter_merge_orc.q.out b/ql/src/test/results/clientpositive/spark/alter_merge_orc.q.out
index cf90940..571651c 100644
--- a/ql/src/test/results/clientpositive/spark/alter_merge_orc.q.out
+++ b/ql/src/test/results/clientpositive/spark/alter_merge_orc.q.out
@@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test
@@ -91,9 +91,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:1
-totalFileSize:7214
-maxFileSize:7214
-minFileSize:7214
+totalFileSize:7216
+maxFileSize:7216
+minFileSize:7216
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test
@@ -171,9 +171,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test_part_n2
@@ -218,9 +218,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:1
-totalFileSize:7214
-maxFileSize:7214
-minFileSize:7214
+totalFileSize:7216
+maxFileSize:7216
+minFileSize:7216
#### A masked pattern was here ####
PREHOOK: query: select count(1) from src_orc_merge_test_part_n2
diff --git a/ql/src/test/results/clientpositive/spark/alter_merge_stats_orc.q.out b/ql/src/test/results/clientpositive/spark/alter_merge_stats_orc.q.out
index 3d380aa..f0cb9d7 100644
--- a/ql/src/test/results/clientpositive/spark/alter_merge_stats_orc.q.out
+++ b/ql/src/test/results/clientpositive/spark/alter_merge_stats_orc.q.out
@@ -48,9 +48,9 @@ columns:struct columns { i32 key, string value}
partitioned:false
partitionColumns:
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: desc extended src_orc_merge_test_stat
@@ -93,7 +93,7 @@ Table Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -144,7 +144,7 @@ Table Parameters:
numFiles 1
numRows 1500
rawDataSize 141000
- totalSize 7214
+ totalSize 7216
#### A masked pattern was here ####
# Storage Information
@@ -214,9 +214,9 @@ columns:struct columns { i32 key, string value}
partitioned:true
partitionColumns:struct partition_columns { string ds}
totalNumberFiles:3
-totalFileSize:7590
-maxFileSize:2530
-minFileSize:2530
+totalFileSize:7596
+maxFileSize:2532
+minFileSize:2532
#### A masked pattern was here ####
PREHOOK: query: desc formatted src_orc_merge_test_part_stat partition (ds='2011')
@@ -243,7 +243,7 @@ Partition Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -290,7 +290,7 @@ Partition Parameters:
numFiles 3
numRows 1500
rawDataSize 141000
- totalSize 7590
+ totalSize 7596
#### A masked pattern was here ####
# Storage Information
@@ -345,7 +345,7 @@ Partition Parameters:
numFiles 1
numRows 1500
rawDataSize 141000
- totalSize 7214
+ totalSize 7216
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
index e773a72..3e6af3c 100644
--- a/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket_map_join_tez2.q.out
@@ -2527,7 +2527,7 @@ STAGE PLANS:
serialization.ddl struct my_dim { string join_col, string filter_col}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 338
+ totalSize 340
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -2549,7 +2549,7 @@ STAGE PLANS:
serialization.ddl struct my_dim { string join_col, string filter_col}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 338
+ totalSize 340
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
name: default.my_dim
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge1.q.out b/ql/src/test/results/clientpositive/spark/orc_merge1.q.out
index 5c95429..d7bba28 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge1.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge1.q.out
@@ -116,8 +116,8 @@ POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)s
POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 2 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 752 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 1056 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 754 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1058 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1b_n1 PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -244,7 +244,7 @@ POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)
POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1b_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1360 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1362 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN
INSERT OVERWRITE TABLE orcfile_merge1c_n1 PARTITION (ds='1', part)
SELECT key, value, PMOD(HASH(key), 2) as part
@@ -363,7 +363,7 @@ POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=0).value SIMPLE [(src)
POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge1c_n1 PARTITION(ds=1,part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1669 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1671 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge1_n1 WHERE ds='1'
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge2.q.out b/ql/src/test/results/clientpositive/spark/orc_merge2.q.out
index 089be29..8e2b2e6 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge2.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge2.q.out
@@ -204,7 +204,7 @@ POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=1).value SIMPL
POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=7).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge2a_n0 PARTITION(one=1,two=9,three=7).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 349 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 351 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(*) USING 'tr \t _' AS (c)
FROM orcfile_merge2a_n0
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge3.q.out b/ql/src/test/results/clientpositive/spark/orc_merge3.q.out
index 1b326ab..87afb37 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge3.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge3.q.out
@@ -163,7 +163,7 @@ POSTHOOK: Output: default@orcfile_merge3b_n0
POSTHOOK: Lineage: orcfile_merge3b_n0.key SIMPLE [(orcfile_merge3a_n0)orcfile_merge3a_n0.FieldSchema(name:key, type:int, comment:null), ]
POSTHOOK: Lineage: orcfile_merge3b_n0.value SIMPLE [(orcfile_merge3a_n0)orcfile_merge3a_n0.FieldSchema(name:value, type:string, comment:null), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 4875 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 4877 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: SELECT SUM(HASH(c)) FROM (
SELECT TRANSFORM(key, value) USING 'tr \t _' AS (c)
FROM orcfile_merge3a_n0
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge4.q.out b/ql/src/test/results/clientpositive/spark/orc_merge4.q.out
index ada109e..83838f9 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge4.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge4.q.out
@@ -37,7 +37,7 @@ POSTHOOK: Output: default@orcfile_merge3a@ds=1
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: INSERT OVERWRITE TABLE orcfile_merge3a PARTITION (ds='1')
SELECT * FROM src
PREHOOK: type: QUERY
@@ -63,9 +63,9 @@ POSTHOOK: Output: default@orcfile_merge3a@ds=2
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
POSTHOOK: Lineage: orcfile_merge3a PARTITION(ds=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 2530 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 2532 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: EXPLAIN INSERT OVERWRITE TABLE orcfile_merge3b
SELECT key, value FROM orcfile_merge3a
PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
index e72d1d2..3065ea1 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge5.q.out
@@ -98,7 +98,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b_n0
@@ -228,7 +228,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b_n0
@@ -262,7 +262,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b_n0
@@ -323,7 +323,7 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b_n0
POSTHOOK: Output: default@orc_merge5b_n0
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b_n0
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
index b3d1ca4..ccd7cda 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge6.q.out
@@ -126,9 +126,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n1
@@ -301,9 +301,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n1
@@ -377,9 +377,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n1
@@ -481,9 +481,9 @@ POSTHOOK: Input: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1
POSTHOOK: Output: default@orc_merge5a_n1@year=2001/hour=24
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n1
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n1
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
index 5c36c68..bf7e98f 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge7.q.out
@@ -171,9 +171,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n0
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n0
@@ -394,9 +394,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n0
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n0
@@ -509,9 +509,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n0
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n0
@@ -614,9 +614,9 @@ POSTHOOK: Input: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0
POSTHOOK: Output: default@orc_merge5a_n0@st=0.8
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 603 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a_n0
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a_n0
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge8.q.out b/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
index 10b59ef..cbf7a6c 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge8.q.out
@@ -117,10 +117,10 @@ POSTHOOK: Lineage: alltypes_orc_n1.ti SIMPLE [(alltypes_n1)alltypes_n1.FieldSche
POSTHOOK: Lineage: alltypes_orc_n1.ts SIMPLE [(alltypes_n1)alltypes_n1.FieldSchema(name:ts, type:timestamp, comment:null), ]
POSTHOOK: Lineage: alltypes_orc_n1.vc SIMPLE [(alltypes_n1)alltypes_n1.FieldSchema(name:vc, type:varchar(5), comment:null), ]
Found 4 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1638 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 1638 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 1649 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 1649 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1640 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1640 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1650 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1650 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: alter table alltypes_orc_n1 concatenate
PREHOOK: type: ALTER_TABLE_MERGE
PREHOOK: Input: default@alltypes_orc_n1
@@ -130,4 +130,4 @@ POSTHOOK: type: ALTER_TABLE_MERGE
POSTHOOK: Input: default@alltypes_orc_n1
POSTHOOK: Output: default@alltypes_orc_n1
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 4650 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 4652 ### HDFS DATE ### hdfs://### HDFS PATH ###
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge9.q.out b/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
index 6f7f0fd..2731f93 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge9.q.out
@@ -64,7 +64,7 @@ POSTHOOK: Input: default@ts_merge
POSTHOOK: Output: hdfs://### HDFS PATH ###
50000
Found 1 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 4260 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 4262 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: create table a_merge like alltypesorc
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
@@ -102,7 +102,7 @@ POSTHOOK: type: LOAD
#### A masked pattern was here ####
POSTHOOK: Output: default@a_merge
Found 2 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 295767 ### HDFS DATE ### hdfs://### HDFS PATH ###
-rw-r--r-- 3 ### USER ### ### GROUP ### 295616 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select count(*) from a_merge
PREHOOK: type: QUERY
@@ -131,7 +131,7 @@ POSTHOOK: Input: default@a_merge
POSTHOOK: Output: hdfs://### HDFS PATH ###
24576
Found 2 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 295767 ### HDFS DATE ### hdfs://### HDFS PATH ###
-rw-r--r-- 3 ### USER ### ### GROUP ### 295616 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: insert into table a_merge select * from alltypesorc
PREHOOK: type: QUERY
@@ -154,9 +154,9 @@ POSTHOOK: Lineage: a_merge.ctimestamp1 SIMPLE [(alltypesorc)alltypesorc.FieldSch
POSTHOOK: Lineage: a_merge.ctimestamp2 SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctimestamp2, type:timestamp, comment:null), ]
POSTHOOK: Lineage: a_merge.ctinyint SIMPLE [(alltypesorc)alltypesorc.FieldSchema(name:ctinyint, type:tinyint, comment:null), ]
Found 3 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 295767 ### HDFS DATE ### hdfs://### HDFS PATH ###
-rw-r--r-- 3 ### USER ### ### GROUP ### 295616 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 295765 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 295767 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select count(*) from a_merge
PREHOOK: type: QUERY
PREHOOK: Input: default@a_merge
@@ -184,5 +184,5 @@ POSTHOOK: Input: default@a_merge
POSTHOOK: Output: hdfs://### HDFS PATH ###
36864
Found 2 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 590713 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 590715 ### HDFS DATE ### hdfs://### HDFS PATH ###
-rw-r--r-- 3 ### USER ### ### GROUP ### 295616 ### HDFS DATE ### hdfs://### HDFS PATH ###
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
index dcdb2b7..a7dd362 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat1.q.out
@@ -163,12 +163,12 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b
POSTHOOK: Output: default@orc_merge5b
Found 6 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 668 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 670 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b
@@ -212,10 +212,10 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@orc_merge5b
POSTHOOK: Output: default@orc_merge5b
Found 4 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 1320 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 679 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 1322 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 681 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select * from orc_merge5b
PREHOOK: type: QUERY
PREHOOK: Input: default@orc_merge5b
diff --git a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
index 7dac967..7d53cf6 100644
--- a/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
+++ b/ql/src/test/results/clientpositive/spark/orc_merge_incompat2.q.out
@@ -235,13 +235,13 @@ POSTHOOK: Output: default@orc_merge5a@st=0.8
Found 4 items
-rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
-rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 4 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a
@@ -353,13 +353,13 @@ POSTHOOK: Input: default@orc_merge5a
POSTHOOK: Output: default@orc_merge5a
POSTHOOK: Output: default@orc_merge5a@st=0.8
Found 3 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 898 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 602 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 900 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 604 ### HDFS DATE ### hdfs://### HDFS PATH ###
Found 3 items
--rw-r--r-- 3 ### USER ### ### GROUP ### 956 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-r--r-- 3 ### USER ### ### GROUP ### 645 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 958 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-r--r-- 3 ### USER ### ### GROUP ### 647 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: show partitions orc_merge5a
PREHOOK: type: SHOWPARTITIONS
PREHOOK: Input: default@orc_merge5a
diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index 4509002..0c44a18 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -3797,7 +3797,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: COMPLETE
TableScan Vectorization:
native: true
Select Operator
@@ -3805,7 +3805,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: []
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: count()
Group By Vectorization:
@@ -3914,7 +3914,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -3924,7 +3924,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [0]
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(ctinyint)
Group By Vectorization:
@@ -4033,7 +4033,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -4043,7 +4043,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [2]
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(cint)
Group By Vectorization:
@@ -4152,7 +4152,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -4162,7 +4162,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [4]
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(cfloat)
Group By Vectorization:
@@ -4271,7 +4271,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -4281,7 +4281,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [6]
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(cstring1)
Group By Vectorization:
@@ -4390,7 +4390,7 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: alltypesnullorc
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
TableScan Vectorization:
native: true
Select Operator
@@ -4400,7 +4400,7 @@ STAGE PLANS:
className: VectorSelectOperator
native: true
projectedOutputColumnNums: [10]
- Statistics: Num rows: 12288 Data size: 9450 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 12288 Data size: 9470 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(cboolean1)
Group By Vectorization:
diff --git a/ql/src/test/results/clientpositive/stats_nonpart.q.out b/ql/src/test/results/clientpositive/stats_nonpart.q.out
index 53351d0..0de4916 100644
--- a/ql/src/test/results/clientpositive/stats_nonpart.q.out
+++ b/ql/src/test/results/clientpositive/stats_nonpart.q.out
@@ -223,7 +223,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 719
+ totalSize 720
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/stats_part.q.out b/ql/src/test/results/clientpositive/stats_part.q.out
index 0351540..82259f6 100644
--- a/ql/src/test/results/clientpositive/stats_part.q.out
+++ b/ql/src/test/results/clientpositive/stats_part.q.out
@@ -220,7 +220,7 @@ Table Parameters:
numPartitions 3
numRows 6
rawDataSize 0
- totalSize 2244
+ totalSize 2250
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -283,7 +283,7 @@ Table Parameters:
numPartitions 3
numRows 8
rawDataSize 0
- totalSize 2998
+ totalSize 3006
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -442,7 +442,7 @@ Table Parameters:
numPartitions 3
numRows 8
rawDataSize 0
- totalSize 2998
+ totalSize 3006
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -529,7 +529,7 @@ Table Parameters:
numPartitions 3
numRows 8
rawDataSize 0
- totalSize 2998
+ totalSize 3006
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/stats_part2.q.out b/ql/src/test/results/clientpositive/stats_part2.q.out
index 1db34fb..ceb3f51 100644
--- a/ql/src/test/results/clientpositive/stats_part2.q.out
+++ b/ql/src/test/results/clientpositive/stats_part2.q.out
@@ -259,7 +259,7 @@ Table Parameters:
numPartitions 3
numRows 6
rawDataSize 0
- totalSize 2335
+ totalSize 2341
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -358,7 +358,7 @@ Table Parameters:
numPartitions 3
numRows 8
rawDataSize 0
- totalSize 3124
+ totalSize 3132
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -514,7 +514,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 756
+ totalSize 758
#### A masked pattern was here ####
# Storage Information
@@ -551,7 +551,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 789
+ totalSize 791
#### A masked pattern was here ####
# Storage Information
@@ -588,7 +588,7 @@ Partition Parameters:
numFiles 2
numRows 4
rawDataSize 0
- totalSize 1579
+ totalSize 1583
#### A masked pattern was here ####
# Storage Information
@@ -712,7 +712,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 756
+ totalSize 758
#### A masked pattern was here ####
# Storage Information
@@ -749,7 +749,7 @@ Partition Parameters:
numFiles 3
numRows 2
rawDataSize 0
- totalSize 2235
+ totalSize 2240
#### A masked pattern was here ####
# Storage Information
@@ -786,7 +786,7 @@ Partition Parameters:
numFiles 2
numRows 4
rawDataSize 0
- totalSize 1579
+ totalSize 1583
#### A masked pattern was here ####
# Storage Information
@@ -852,7 +852,7 @@ Partition Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 756
+ totalSize 758
#### A masked pattern was here ####
# Storage Information
@@ -889,7 +889,7 @@ Partition Parameters:
numFiles 3
numRows 2
rawDataSize 0
- totalSize 2235
+ totalSize 2240
#### A masked pattern was here ####
# Storage Information
@@ -926,7 +926,7 @@ Partition Parameters:
numFiles 2
numRows 4
rawDataSize 0
- totalSize 1579
+ totalSize 1583
#### A masked pattern was here ####
# Storage Information
@@ -996,7 +996,7 @@ Partition Parameters:
numFiles 2
numRows 1
rawDataSize 0
- totalSize 1453
+ totalSize 1455
#### A masked pattern was here ####
# Storage Information
@@ -1033,7 +1033,7 @@ Partition Parameters:
numFiles 4
numRows 1
rawDataSize 0
- totalSize 2929
+ totalSize 2935
#### A masked pattern was here ####
# Storage Information
@@ -1070,7 +1070,7 @@ Partition Parameters:
numFiles 2
numRows 4
rawDataSize 0
- totalSize 1579
+ totalSize 1583
#### A masked pattern was here ####
# Storage Information
diff --git a/ql/src/test/results/clientpositive/stats_sizebug.q.out b/ql/src/test/results/clientpositive/stats_sizebug.q.out
index 7bcf553..984aaf5 100644
--- a/ql/src/test/results/clientpositive/stats_sizebug.q.out
+++ b/ql/src/test/results/clientpositive/stats_sizebug.q.out
@@ -159,7 +159,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 718
+ totalSize 720
transactional true
transactional_properties default
#### A masked pattern was here ####
@@ -206,7 +206,7 @@ Table Parameters:
numFiles 1
numRows 2
rawDataSize 0
- totalSize 718
+ totalSize 720
transactional true
transactional_properties default
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
index c1c5206..02b4f6b 100644
--- a/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
+++ b/ql/src/test/results/clientpositive/tez/acid_vectorization_original_tez.q.out
@@ -370,10 +370,10 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
Found 4 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8715 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7499 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7141 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7023 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8716 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7500 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7142 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7024 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: insert into over10k_orc_bucketed_n0 select * from over10k_n9
PREHOOK: type: QUERY
PREHOOK: Input: default@over10k_n9
@@ -394,14 +394,14 @@ POSTHOOK: Lineage: over10k_orc_bucketed_n0.si SIMPLE [(over10k_n9)over10k_n9.Fie
POSTHOOK: Lineage: over10k_orc_bucketed_n0.t SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:t, type:tinyint, comment:null), ]
POSTHOOK: Lineage: over10k_orc_bucketed_n0.ts SIMPLE [(over10k_n9)over10k_n9.FieldSchema(name:ts, type:timestamp, comment:null), ]
Found 8 items
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8715 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 8715 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7499 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7499 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7141 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7141 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7023 ### HDFS DATE ### hdfs://### HDFS PATH ###
--rw-rw-rw- 3 ### USER ### ### GROUP ### 7023 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8716 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 8716 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7500 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7500 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7142 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7142 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7024 ### HDFS DATE ### hdfs://### HDFS PATH ###
+-rw-rw-rw- 3 ### USER ### ### GROUP ### 7024 ### HDFS DATE ### hdfs://### HDFS PATH ###
PREHOOK: query: select distinct 7 as seven, INPUT__FILE__NAME from over10k_orc_bucketed_n0
PREHOOK: type: QUERY
PREHOOK: Input: default@over10k_orc_bucketed_n0
diff --git a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
index 42659b0..58acf1a 100644
--- a/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
+++ b/ql/src/test/results/clientpositive/tez/orc_merge12.q.out
@@ -148,6 +148,7 @@ File Version: 0.12 with ORC_517
Rows: 24576
Compression: ZLIB
Compression size: 131072
+Calendar: Julian/Gregorian
Type: struct<atinyint:tinyint,asmallint:smallint,aint:int,abigint:bigint,afloat:float,adouble:double,astring1:string,astring2:string,atimestamp1:timestamp,atimestamp2:timestamp,aboolean1:boolean,aboolean2:boolean,btinyint:tinyint,bsmallint:smallint,bint:int,bbigint:bigint,bfloat:float,bdouble:double,bstring1:string,bstring2:string,btimestamp1:timestamp,btimestamp2:timestamp,bboolean1:boolean,bboolean2:boolean,ctinyint:tinyint,csmallint:smallint,cint:int,cbigint:bigint,cfloat:float,cdoubl [...]
Stripe Statistics:
@@ -813,7 +814,7 @@ Stripes:
Entry 0: count: 6889 hasNull: true true: 3402 positions: 0,0,0,0,0,0,0,0
Entry 1: count: 2284 hasNull: true true: 581 positions: 0,168,8,0,0,520,97,1
-File length: 3004630 bytes
+File length: 3004632 bytes
Padding length: 0 bytes
Padding ratio: 0%
________________________________________________________________________________________________________________________
diff --git a/ql/src/test/results/clientpositive/type_change_test_fraction.q.out b/ql/src/test/results/clientpositive/type_change_test_fraction.q.out
index 4a2ee3f..b5db3e1 100644
--- a/ql/src/test/results/clientpositive/type_change_test_fraction.q.out
+++ b/ql/src/test/results/clientpositive/type_change_test_fraction.q.out
@@ -5998,24 +5998,24 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n2
#### A masked pattern was here ####
1 123400.0 2.345E67
-2 1.4E-45 4.9E-324
-3 -1.4E-45 -4.9E-324
-4 3.4E38 1.79E308
-5 -3.4E38 -1.79E308
+2 1.401298464324817E-45 4.9E-324
+3 -1.401298464324817E-45 -4.9E-324
+4 3.3999999521443642E38 1.79E308
+5 -3.3999999521443642E38 -1.79E308
6 1.0 1.0
7 -1.0 -1.0
-8 1.23456794E18 1.23456789012345677E18
-9 -1.23456794E18 -1.23456789012345677E18
-10 1.23456794E9 1.23456789E9
-11 -1.23456794E9 -1.23456789E9
+8 1.23456793955060941E18 1.23456789012345677E18
+9 -1.23456793955060941E18 -1.23456789012345677E18
+10 1.234567936E9 1.23456789E9
+11 -1.234567936E9 -1.23456789E9
12 12345.0 12345.0
13 -12345.0 -12345.0
14 123.0 123.0
15 -123.0 -123.0
-16 1.2345679 2.3456789
-17 -1.2345679 -2.3456789
-18 1234567.9 2345678.9
-19 -1234567.9 -2345678.9
+16 1.2345678806304932 2.3456789
+17 -1.2345678806304932 -2.3456789
+18 1234567.875 2345678.9
+19 -1234567.875 -2345678.9
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n2
order by cId
PREHOOK: type: QUERY
@@ -6119,18 +6119,18 @@ POSTHOOK: Input: default@testaltcolorc_n2
5 NULL NULL
6 1.000000000000000000 1.000000000000000000
7 -1.000000000000000000 -1.000000000000000000
-8 1234567940000000000.000000000000000000 1234567890123456770.000000000000000000
-9 -1234567940000000000.000000000000000000 -1234567890123456770.000000000000000000
-10 1234567940.000000000000000000 1234567890.000000000000000000
-11 -1234567940.000000000000000000 -1234567890.000000000000000000
+8 1234567939550609410.000000000000000000 1234567890123456770.000000000000000000
+9 -1234567939550609410.000000000000000000 -1234567890123456770.000000000000000000
+10 1234567936.000000000000000000 1234567890.000000000000000000
+11 -1234567936.000000000000000000 -1234567890.000000000000000000
12 12345.000000000000000000 12345.000000000000000000
13 -12345.000000000000000000 -12345.000000000000000000
14 123.000000000000000000 123.000000000000000000
15 -123.000000000000000000 -123.000000000000000000
-16 1.234567900000000000 2.345678900000000000
-17 -1.234567900000000000 -2.345678900000000000
-18 1234567.900000000000000000 2345678.900000000000000000
-19 -1234567.900000000000000000 -2345678.900000000000000000
+16 1.234567880630493200 2.345678900000000000
+17 -1.234567880630493200 -2.345678900000000000
+18 1234567.875000000000000000 2345678.900000000000000000
+19 -1234567.875000000000000000 -2345678.900000000000000000
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n2
order by cId
PREHOOK: type: QUERY
@@ -6242,8 +6242,8 @@ POSTHOOK: Input: default@testaltcolorc_n2
13 NULL NULL
14 NULL NULL
15 NULL NULL
-16 1.2345679000000000000000000000000000000 2.3456789000000000000000000000000000000
-17 -1.2345679000000000000000000000000000000 -2.3456789000000000000000000000000000000
+16 1.2345678806304932000000000000000000000 2.3456789000000000000000000000000000000
+17 -1.2345678806304932000000000000000000000 -2.3456789000000000000000000000000000000
18 NULL NULL
19 NULL NULL
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n2
@@ -6349,18 +6349,18 @@ POSTHOOK: Input: default@testaltcolorc_n2
5 NULL NULL
6 1.000000000000000000 1.000000000000000000
7 -1.000000000000000000 -1.000000000000000000
-8 1234567940000000000.000000000000000000 1234567890123456770.000000000000000000
-9 -1234567940000000000.000000000000000000 -1234567890123456770.000000000000000000
-10 1234567940.000000000000000000 1234567890.000000000000000000
-11 -1234567940.000000000000000000 -1234567890.000000000000000000
+8 1234567939550609410.000000000000000000 1234567890123456770.000000000000000000
+9 -1234567939550609410.000000000000000000 -1234567890123456770.000000000000000000
+10 1234567936.000000000000000000 1234567890.000000000000000000
+11 -1234567936.000000000000000000 -1234567890.000000000000000000
12 12345.000000000000000000 12345.000000000000000000
13 -12345.000000000000000000 -12345.000000000000000000
14 123.000000000000000000 123.000000000000000000
15 -123.000000000000000000 -123.000000000000000000
-16 1.234567900000000000 2.345678900000000000
-17 -1.234567900000000000 -2.345678900000000000
-18 1234567.900000000000000000 2345678.900000000000000000
-19 -1234567.900000000000000000 -2345678.900000000000000000
+16 1.234567880630493200 2.345678900000000000
+17 -1.234567880630493200 -2.345678900000000000
+18 1234567.875000000000000000 2345678.900000000000000000
+19 -1234567.875000000000000000 -2345678.900000000000000000
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n2
order by cId
PREHOOK: type: QUERY
@@ -6472,8 +6472,8 @@ POSTHOOK: Input: default@testaltcolorc_n2
13 NULL NULL
14 NULL NULL
15 NULL NULL
-16 1.2345679000000000000000000000000000000 2.3456789000000000000000000000000000000
-17 -1.2345679000000000000000000000000000000 -2.3456789000000000000000000000000000000
+16 1.2345678806304932000000000000000000000 2.3456789000000000000000000000000000000
+17 -1.2345678806304932000000000000000000000 -2.3456789000000000000000000000000000000
18 NULL NULL
19 NULL NULL
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n2
diff --git a/ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out b/ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
index cbe2182..955d268 100644
--- a/ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
+++ b/ql/src/test/results/clientpositive/type_change_test_fraction_vectorized.q.out
@@ -5998,24 +5998,24 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n3
#### A masked pattern was here ####
1 123400.0 2.345E67
-2 1.4E-45 4.9E-324
-3 -1.4E-45 -4.9E-324
-4 3.4E38 1.79E308
-5 -3.4E38 -1.79E308
+2 1.401298464324817E-45 4.9E-324
+3 -1.401298464324817E-45 -4.9E-324
+4 3.3999999521443642E38 1.79E308
+5 -3.3999999521443642E38 -1.79E308
6 1.0 1.0
7 -1.0 -1.0
-8 1.23456794E18 1.23456789012345677E18
-9 -1.23456794E18 -1.23456789012345677E18
-10 1.23456794E9 1.23456789E9
-11 -1.23456794E9 -1.23456789E9
+8 1.23456793955060941E18 1.23456789012345677E18
+9 -1.23456793955060941E18 -1.23456789012345677E18
+10 1.234567936E9 1.23456789E9
+11 -1.234567936E9 -1.23456789E9
12 12345.0 12345.0
13 -12345.0 -12345.0
14 123.0 123.0
15 -123.0 -123.0
-16 1.2345679 2.3456789
-17 -1.2345679 -2.3456789
-18 1234567.9 2345678.9
-19 -1234567.9 -2345678.9
+16 1.2345678806304932 2.3456789
+17 -1.2345678806304932 -2.3456789
+18 1234567.875 2345678.9
+19 -1234567.875 -2345678.9
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n3
order by cId
PREHOOK: type: QUERY
@@ -6119,18 +6119,18 @@ POSTHOOK: Input: default@testaltcolorc_n3
5 NULL NULL
6 1.000000000000000000 1.000000000000000000
7 -1.000000000000000000 -1.000000000000000000
-8 1234567940000000000.000000000000000000 1234567890123456770.000000000000000000
-9 -1234567940000000000.000000000000000000 -1234567890123456770.000000000000000000
-10 1234567940.000000000000000000 1234567890.000000000000000000
-11 -1234567940.000000000000000000 -1234567890.000000000000000000
+8 1234567939550609410.000000000000000000 1234567890123456770.000000000000000000
+9 -1234567939550609410.000000000000000000 -1234567890123456770.000000000000000000
+10 1234567936.000000000000000000 1234567890.000000000000000000
+11 -1234567936.000000000000000000 -1234567890.000000000000000000
12 12345.000000000000000000 12345.000000000000000000
13 -12345.000000000000000000 -12345.000000000000000000
14 123.000000000000000000 123.000000000000000000
15 -123.000000000000000000 -123.000000000000000000
-16 1.234567900000000000 2.345678900000000000
-17 -1.234567900000000000 -2.345678900000000000
-18 1234567.900000000000000000 2345678.900000000000000000
-19 -1234567.900000000000000000 -2345678.900000000000000000
+16 1.234567880630493200 2.345678900000000000
+17 -1.234567880630493200 -2.345678900000000000
+18 1234567.875000000000000000 2345678.900000000000000000
+19 -1234567.875000000000000000 -2345678.900000000000000000
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n3
order by cId
PREHOOK: type: QUERY
@@ -6242,8 +6242,8 @@ POSTHOOK: Input: default@testaltcolorc_n3
13 NULL NULL
14 NULL NULL
15 NULL NULL
-16 1.2345679000000000000000000000000000000 2.3456789000000000000000000000000000000
-17 -1.2345679000000000000000000000000000000 -2.3456789000000000000000000000000000000
+16 1.2345678806304932000000000000000000000 2.3456789000000000000000000000000000000
+17 -1.2345678806304932000000000000000000000 -2.3456789000000000000000000000000000000
18 NULL NULL
19 NULL NULL
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n3
@@ -6349,18 +6349,18 @@ POSTHOOK: Input: default@testaltcolorc_n3
5 NULL NULL
6 1.000000000000000000 1.000000000000000000
7 -1.000000000000000000 -1.000000000000000000
-8 1234567940000000000.000000000000000000 1234567890123456770.000000000000000000
-9 -1234567940000000000.000000000000000000 -1234567890123456770.000000000000000000
-10 1234567940.000000000000000000 1234567890.000000000000000000
-11 -1234567940.000000000000000000 -1234567890.000000000000000000
+8 1234567939550609410.000000000000000000 1234567890123456770.000000000000000000
+9 -1234567939550609410.000000000000000000 -1234567890123456770.000000000000000000
+10 1234567936.000000000000000000 1234567890.000000000000000000
+11 -1234567936.000000000000000000 -1234567890.000000000000000000
12 12345.000000000000000000 12345.000000000000000000
13 -12345.000000000000000000 -12345.000000000000000000
14 123.000000000000000000 123.000000000000000000
15 -123.000000000000000000 -123.000000000000000000
-16 1.234567900000000000 2.345678900000000000
-17 -1.234567900000000000 -2.345678900000000000
-18 1234567.900000000000000000 2345678.900000000000000000
-19 -1234567.900000000000000000 -2345678.900000000000000000
+16 1.234567880630493200 2.345678900000000000
+17 -1.234567880630493200 -2.345678900000000000
+18 1234567.875000000000000000 2345678.900000000000000000
+19 -1234567.875000000000000000 -2345678.900000000000000000
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n3
order by cId
PREHOOK: type: QUERY
@@ -6472,8 +6472,8 @@ POSTHOOK: Input: default@testaltcolorc_n3
13 NULL NULL
14 NULL NULL
15 NULL NULL
-16 1.2345679000000000000000000000000000000 2.3456789000000000000000000000000000000
-17 -1.2345679000000000000000000000000000000 -2.3456789000000000000000000000000000000
+16 1.2345678806304932000000000000000000000 2.3456789000000000000000000000000000000
+17 -1.2345678806304932000000000000000000000 -2.3456789000000000000000000000000000000
18 NULL NULL
19 NULL NULL
PREHOOK: query: select cId, cDecimal38_18, cDecimal38_37, cDecimal16_8, cDecimal3_2 from testAltColORC_n3
diff --git a/ql/src/test/results/clientpositive/typechangetest.q.out b/ql/src/test/results/clientpositive/typechangetest.q.out
index 49ec29e..4a617d7 100644
--- a/ql/src/test/results/clientpositive/typechangetest.q.out
+++ b/ql/src/test/results/clientpositive/typechangetest.q.out
@@ -1258,10 +1258,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 2017-11-07 01:02:49.999999999
-2 1399-12-23 17:01:01.000000001
-3 1399-12-23 17:01:01.000000001
-4 1399-12-23 17:01:01.000000001
+1 2017-11-07 09:02:49.999999999
+2 1400-01-01 01:01:01.000000001
+3 1400-01-01 01:01:01.000000001
+4 1400-01-01 01:01:01.000000001
PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1270,10 +1270,10 @@ POSTHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 ord
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 12345678901234567890.123456789012345678 1.79E308 3.4E38
-2 1.1 2.2 3.3
-3 10.1 20.2 30.3
-4 -10.1 -20.2 -30.3
+1 12345678901234567890.123456789012345678 1.79E308 3.3999999521443642E38
+2 1.1 2.2 3.299999952316284
+3 10.1 20.2 30.299999237060547
+4 -10.1 -20.2 -30.299999237060547
PREHOOK: query: select cId, cBigInt, cInt, cSmallInt, cTinyint from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1334,10 +1334,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 2017-11-07 01:02:49.999999999
-2 1399-12-23 17:01:01.000000001
-3 1399-12-23 17:01:01.000000001
-4 1399-12-23 17:01:01.000000001
+1 2017-11-07 09:02:49.999999999
+2 1400-01-01 01:01:01.000000001
+3 1400-01-01 01:01:01.000000001
+4 1400-01-01 01:01:01.000000001
PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1346,10 +1346,10 @@ POSTHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 ord
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 12345678901234567890.123456789012345678 1.79E308 3.4E38
-2 1.1 2.2 3.3
-3 10.1 20.2 30.3
-4 -10.1 -20.2 -30.3
+1 12345678901234567890.123456789012345678 1.79E308 3.3999999521443642E38
+2 1.1 2.2 3.299999952316284
+3 10.1 20.2 30.299999237060547
+4 -10.1 -20.2 -30.299999237060547
PREHOOK: query: select cId, cBigInt, cInt, cSmallInt, cTinyint from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1410,10 +1410,10 @@ POSTHOOK: query: select cId, cTimeStamp from testAltColORC_n0 order by cId
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 2017-11-07 01:02:49.999999999
-2 1399-12-23 17:01:01.000000001
-3 1399-12-23 17:01:01.000000001
-4 1399-12-23 17:01:01.000000001
+1 2017-11-07 09:02:49.999999999
+2 1400-01-01 01:01:01.000000001
+3 1400-01-01 01:01:01.000000001
+4 1400-01-01 01:01:01.000000001
PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1422,10 +1422,10 @@ POSTHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 ord
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 12345678901234567890.123456789012345678 1.79E308 3.4E38
-2 1.1 2.2 3.3
-3 10.1 20.2 30.3
-4 -10.1 -20.2 -30.3
+1 12345678901234567890.123456789012345678 1.79E308 3.3999999521443642E38
+2 1.1 2.2 3.299999952316284
+3 10.1 20.2 30.299999237060547
+4 -10.1 -20.2 -30.299999237060547
PREHOOK: query: select cId, cBigInt, cInt, cSmallInt, cTinyint from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1487,9 +1487,9 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
1 2017
-2 1399
-3 1399
-4 1399
+2 1400
+3 1400
+4 1400
PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1498,9 +1498,9 @@ POSTHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 ord
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 1234 1.79 3.4E
-2 1.1 2.2 3.3
-3 10.1 20.2 30.3
+1 1234 1.79 3.39
+2 1.1 2.2 3.29
+3 10.1 20.2 30.2
4 -10. -20. -30.
PREHOOK: query: select cId, cBigInt, cInt, cSmallInt, cTinyint from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
@@ -1563,9 +1563,9 @@ POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
1 2017
-2 1399
-3 1399
-4 1399
+2 1400
+3 1400
+4 1400
PREHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
PREHOOK: Input: default@testaltcolorc_n0
@@ -1574,9 +1574,9 @@ POSTHOOK: query: select cId, cDecimal, cDouble, cFloat from testAltColORC_n0 ord
POSTHOOK: type: QUERY
POSTHOOK: Input: default@testaltcolorc_n0
#### A masked pattern was here ####
-1 1234 1.79 3.4E
-2 1.1 2.2 3.3
-3 10.1 20.2 30.3
+1 1234 1.79 3.39
+2 1.1 2.2 3.29
+3 10.1 20.2 30.2
4 -10. -20. -30.
PREHOOK: query: select cId, cBigInt, cInt, cSmallInt, cTinyint from testAltColORC_n0 order by cId
PREHOOK: type: QUERY
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index db8db1c..27583b8 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -42,12 +42,12 @@ import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
-import org.apache.avro.UnresolvedUnionException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.type.CalendarUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.type.HiveChar;
@@ -88,6 +88,11 @@ class AvroDeserializer {
*/
private ZoneId writerTimezone = null;
+ /**
+ * Whether the file was written using proleptic Gregorian or hybrid calendar.
+ */
+ private Boolean writerProleptic = null;
+
private Configuration configuration = null;
AvroDeserializer() {}
@@ -169,9 +174,10 @@ class AvroDeserializer {
GenericRecord r = recordWritable.getRecord();
Schema fileSchema = recordWritable.getFileSchema();
writerTimezone = recordWritable.getWriterTimezone();
+ writerProleptic = recordWritable.getWriterProleptic();
- UID recordReaderId = recordWritable.getRecordReaderID();
- //If the record reader (from which the record is originated) is already seen and valid,
+ UID recordReaderId = recordWritable.getRecordReaderID();
+ //If the record reader (from which the record is originated) is already seen and valid,
//no need to re-encode the record.
if(!noEncodingNeeded.contains(recordReaderId)) {
SchemaReEncoder reEncoder = null;
@@ -311,16 +317,30 @@ class AvroDeserializer {
str = datum.toString();
HiveVarchar hvc = new HiveVarchar(str, maxLength);
return hvc;
- case DATE:
+ case DATE: {
if (recordSchema.getType() != Type.INT) {
throw new AvroSerdeException("Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
}
- return Date.ofEpochMilli(DateWritableV2.daysToMillis((Integer)datum));
- case TIMESTAMP:
+ final boolean skipProlepticConversion;
+ if (writerProleptic != null) {
+ skipProlepticConversion = writerProleptic;
+ } else {
+ if (configuration != null) {
+ skipProlepticConversion = HiveConf.getBoolVar(
+ configuration, HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT);
+ } else {
+ skipProlepticConversion = HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT.defaultBoolVal;
+ }
+ }
+
+ return Date.ofEpochMilli(DateWritableV2.daysToMillis(
+ skipProlepticConversion ? (Integer) datum : CalendarUtils.convertDateToProleptic((Integer) datum)));
+ }
+ case TIMESTAMP: {
if (recordSchema.getType() != Type.LONG) {
throw new AvroSerdeException(
- "Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
+ "Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType());
}
// If a time zone is found in file metadata (property name: writer.time.zone), convert the
// timestamp to that (writer) time zone in order to emulate time zone agnostic behavior.
@@ -328,23 +348,40 @@ class AvroDeserializer {
// to the server's (reader) time zone for backwards compatibility reasons - unless the
// session level configuration hive.avro.timestamp.skip.conversion is set to true, in which
// case we assume it was written by a time zone agnostic writer, so we don't convert it.
- boolean skipConversion;
+ final boolean skipUTCConversion;
if (configuration != null) {
- skipConversion = HiveConf.getBoolVar(
+ skipUTCConversion = HiveConf.getBoolVar(
configuration, HiveConf.ConfVars.HIVE_AVRO_TIMESTAMP_SKIP_CONVERSION);
} else {
- skipConversion = HiveConf.ConfVars.HIVE_AVRO_TIMESTAMP_SKIP_CONVERSION.defaultBoolVal;
+ skipUTCConversion = HiveConf.ConfVars.HIVE_AVRO_TIMESTAMP_SKIP_CONVERSION.defaultBoolVal;
}
ZoneId convertToTimeZone;
if (writerTimezone != null) {
convertToTimeZone = writerTimezone;
- } else if (skipConversion) {
+ } else if (skipUTCConversion) {
convertToTimeZone = ZoneOffset.UTC;
} else {
convertToTimeZone = TimeZone.getDefault().toZoneId();
}
- Timestamp timestamp = Timestamp.ofEpochMilli((Long)datum);
- return TimestampTZUtil.convertTimestampToZone(timestamp, ZoneOffset.UTC, convertToTimeZone);
+ final boolean skipProlepticConversion;
+ if (writerProleptic != null) {
+ skipProlepticConversion = writerProleptic;
+ } else {
+ if (configuration != null) {
+ skipProlepticConversion = HiveConf.getBoolVar(
+ configuration, HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT);
+ } else {
+ skipProlepticConversion = HiveConf.ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN_DEFAULT.defaultBoolVal;
+ }
+ }
+ Timestamp timestamp = TimestampTZUtil.convertTimestampToZone(
+ Timestamp.ofEpochMilli((Long) datum), ZoneOffset.UTC, convertToTimeZone);
+ if (!skipProlepticConversion) {
+ timestamp = Timestamp.ofEpochMilli(
+ CalendarUtils.convertTimeToProleptic(timestamp.toEpochMilli()));
+ }
+ return timestamp;
+ }
default:
return datum;
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable.java
index 095197c..92b81a7 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroGenericRecordWritable.java
@@ -52,6 +52,8 @@ public class AvroGenericRecordWritable implements Writable{
// Time zone file was written in, from metadata
private ZoneId writerTimezone = null;
+ private Boolean writerProleptic = null;
+
/**
* Unique Id determine which record reader created this record
*/
@@ -78,8 +80,9 @@ public class AvroGenericRecordWritable implements Writable{
this.record = record;
}
- public AvroGenericRecordWritable(ZoneId writerTimezone) {
+ public AvroGenericRecordWritable(ZoneId writerTimezone, Boolean writerProleptic) {
this.writerTimezone = writerTimezone;
+ this.writerProleptic = writerProleptic;
}
@Override
@@ -153,4 +156,8 @@ public class AvroGenericRecordWritable implements Writable{
public ZoneId getWriterTimezone() {
return writerTimezone;
}
+
+ public Boolean getWriterProleptic() {
+ return writerProleptic;
+ }
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
index 905e19b..ff4a197 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
@@ -60,6 +60,7 @@ public class AvroSerDe extends AbstractSerDe {
public static final String DATE_TYPE_NAME = "date";
public static final String TIMESTAMP_TYPE_NAME = "timestamp-millis";
public static final String WRITER_TIME_ZONE = "writer.time.zone";
+ public static final String WRITER_PROLEPTIC = "writer.proleptic";
public static final String AVRO_PROP_LOGICAL_TYPE = "logicalType";
public static final String AVRO_PROP_PRECISION = "precision";
public static final String AVRO_PROP_SCALE = "scale";
@@ -148,7 +149,7 @@ public class AvroSerDe extends AbstractSerDe {
}
if(!badSchema) {
- this.avroSerializer = new AvroSerializer();
+ this.avroSerializer = new AvroSerializer(configuration);
this.avroDeserializer = new AvroDeserializer(configuration);
}
}
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
index 4331c11..490434d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerializer.java
@@ -31,13 +31,16 @@ import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.Fixed;
import org.apache.avro.generic.GenericEnumSymbol;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.common.type.TimestampTZUtil;
-import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.common.type.CalendarUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -56,13 +59,22 @@ import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
import org.apache.hadoop.io.Writable;
class AvroSerializer {
+
/**
* The Schema to use when serializing Map keys.
* Since we're sharing this across Serializer instances, it must be immutable;
* any properties need to be added in a static initializer.
*/
private static final Schema STRING_SCHEMA = Schema.create(Schema.Type.STRING);
- AvroGenericRecordWritable cache = new AvroGenericRecordWritable();
+ private AvroGenericRecordWritable cache = new AvroGenericRecordWritable();
+ private boolean defaultProleptic;
+
+ AvroSerializer() {}
+
+ AvroSerializer(Configuration configuration) {
+ this.defaultProleptic = HiveConf.getBoolVar(
+ configuration, ConfVars.HIVE_AVRO_PROLEPTIC_GREGORIAN);
+ }
// Hive is pretty simple (read: stupid) in writing out values via the serializer.
// We're just going to go through, matching indices. Hive formats normally
@@ -210,12 +222,15 @@ class AvroSerializer {
return vc.getValue();
case DATE:
Date date = ((DateObjectInspector)fieldOI).getPrimitiveJavaObject(structFieldData);
- return DateWritableV2.dateToDays(date);
+ return defaultProleptic ? date.toEpochDay() :
+ CalendarUtils.convertDateToHybrid(date.toEpochDay());
case TIMESTAMP:
Timestamp timestamp =
((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData);
+ long millis = defaultProleptic ? timestamp.toEpochMilli() :
+ CalendarUtils.convertTimeToHybrid(timestamp.toEpochMilli());
timestamp = TimestampTZUtil.convertTimestampToZone(
- timestamp, TimeZone.getDefault().toZoneId(), ZoneOffset.UTC);
+ Timestamp.ofEpochMilli(millis), TimeZone.getDefault().toZoneId(), ZoneOffset.UTC);
return timestamp.toEpochMilli();
case UNKNOWN:
throw new AvroSerdeException("Received UNKNOWN primitive category.");
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
index 1cd03f7..514bca7 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
@@ -293,7 +293,7 @@ public class TestAvroDeserializer {
record.put("timestampField", 1546387200999L);
assertTrue(GENERIC_DATA.validate(readerSchema, record));
- AvroGenericRecordWritable agrw = new AvroGenericRecordWritable(ZoneId.of("America/New_York"));
+ AvroGenericRecordWritable agrw = new AvroGenericRecordWritable(ZoneId.of("America/New_York"), false);
agrw.setRecord(record);
agrw.setFileSchema(readerSchema);
agrw.setRecordReaderID(new UID());
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileFormatProxy.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileFormatProxy.java
index d403af1..e696591 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileFormatProxy.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileFormatProxy.java
@@ -21,6 +21,7 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.Metastore.SplitInfos;
@@ -37,7 +38,8 @@ public interface FileFormatProxy {
* @param fileMetadata File metadata from metastore cache.
* @return The result to return to client for this file, or null if file is eliminated.
*/
- SplitInfos applySargToMetadata(SearchArgument sarg, ByteBuffer fileMetadata) throws IOException;
+ SplitInfos applySargToMetadata(SearchArgument sarg, ByteBuffer fileMetadata,
+ Configuration conf) throws IOException;
/**
* @param fs The filesystem of the file.
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
index ff30260..73917fc 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/FileMetadataHandler.java
@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
public abstract class FileMetadataHandler {
protected static final Logger LOG = LoggerFactory.getLogger(FileMetadataHandler.class);
- private Configuration conf;
+ protected Configuration conf;
private PartitionExpressionProxy expressionProxy;
private FileFormatProxy fileFormatProxy;
private MetadataStore store;
diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
index d1a546f..a65e842 100644
--- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
+++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
@@ -106,6 +106,7 @@ public class TestStructColumnVector {
LongColumnVector x1 = new LongColumnVector();
TimestampColumnVector x2 = new TimestampColumnVector();
x2.setIsUTC(true);
+ x2.setUsingProlepticCalendar(true);
StructColumnVector x = new StructColumnVector(1024, x1, x2);
BytesColumnVector y = new BytesColumnVector();
batch.cols[0] = x;
@@ -140,6 +141,7 @@ public class TestStructColumnVector {
LongColumnVector x1 = new LongColumnVector();
TimestampColumnVector x2 = new TimestampColumnVector();
x2.setIsUTC(true);
+ x2.setUsingProlepticCalendar(true);
StructColumnVector x = new StructColumnVector(1024, x1, x2);
BytesColumnVector y = new BytesColumnVector();
batch.cols[0] = x;