Posted to commits@hive.apache.org by mm...@apache.org on 2016/04/10 08:59:43 UTC

[11/12] hive git commit: HIVE-13111: Fix timestamp / interval_day_time wrong results with HIVE-9862 (Matt McCline, reviewed by Jason Dere)

http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/itests/src/test/resources/testconfiguration.properties.orig
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties.orig b/itests/src/test/resources/testconfiguration.properties.orig
new file mode 100644
index 0000000..31cfb5b
--- /dev/null
+++ b/itests/src/test/resources/testconfiguration.properties.orig
@@ -0,0 +1,1327 @@
+# NOTE: files should be listed in alphabetical order
+minimr.query.files=auto_sortmerge_join_16.q,\
+  bucket4.q,\
+  bucket5.q,\
+  bucket6.q,\
+  bucket_many.q,\
+  bucket_num_reducers.q,\
+  bucket_num_reducers2.q,\
+  bucketizedhiveinputformat.q,\
+  bucketmapjoin6.q,\
+  bucketmapjoin7.q,\
+  constprog_partitioner.q,\
+  disable_merge_for_bucketing.q,\
+  empty_dir_in_table.q,\
+  exchgpartition2lel.q,\
+  external_table_with_space_in_location_path.q,\
+  file_with_header_footer.q,\
+  groupby2.q,\
+  import_exported_table.q,\
+  index_bitmap3.q,\
+  index_bitmap_auto.q,\
+  infer_bucket_sort_bucketed_table.q,\
+  infer_bucket_sort_dyn_part.q,\
+  infer_bucket_sort_map_operators.q,\
+  infer_bucket_sort_merge.q,\
+  infer_bucket_sort_num_buckets.q,\
+  infer_bucket_sort_reducers_power_two.q,\
+  input16_cc.q,\
+  insert_dir_distcp.q,\
+  join1.q,\
+  join_acid_non_acid.q,\
+  leftsemijoin_mr.q,\
+  list_bucket_dml_10.q,\
+  load_fs2.q,\
+  load_hdfs_file_with_space_in_the_name.q,\
+  non_native_window_udf.q,\
+  orc_merge_diff_fs.q,\
+  optrstat_groupby.q,\
+  parallel_orderby.q,\
+  quotedid_smb.q,\
+  reduce_deduplicate.q,\
+  remote_script.q,\
+  root_dir_external_table.q,\
+  schemeAuthority.q,\
+  schemeAuthority2.q,\
+  scriptfile1.q,\
+  scriptfile1_win.q,\
+  skewjoin_onesideskew.q,\
+  stats_counter.q,\
+  stats_counter_partitioned.q,\
+  table_nonprintable.q,\
+  temp_table_external.q,\
+  truncate_column_buckets.q,\
+  uber_reduce.q,\
+  udf_using.q
+
+# These tests are disabled for minimr
+#  ql_rewrite_gbtoidx.q,\
+#  ql_rewrite_gbtoidx_cbo_1.q,\
+#  ql_rewrite_gbtoidx_cbo_2.q,\
+#  smb_mapjoin_8.q,\
+
+
+# Tests that are not enabled for CLI Driver
+disabled.query.files=ql_rewrite_gbtoidx.q,\
+  ql_rewrite_gbtoidx_cbo_1.q,\
+  ql_rewrite_gbtoidx_cbo_2.q,\
+  rcfile_merge1.q,\
+  smb_mapjoin_8.q
+
+minitez.query.files.shared=acid_globallimit.q,\
+  empty_join.q,\
+  alter_merge_2_orc.q,\
+  alter_merge_orc.q,\
+  alter_merge_stats_orc.q,\
+  auto_join0.q,\
+  auto_join1.q,\
+  bucket2.q,\
+  bucket3.q,\
+  bucket4.q,\
+  cbo_gby.q,\
+  cbo_gby_empty.q,\
+  cbo_join.q,\
+  cbo_limit.q,\
+  cbo_semijoin.q,\
+  cbo_simple_select.q,\
+  cbo_stats.q,\
+  cbo_subq_exists.q,\
+  cbo_subq_in.q,\
+  cbo_subq_not_in.q,\
+  cbo_udf_udaf.q,\
+  cbo_union.q,\
+  cbo_views.q,\
+  cbo_windowing.q,\
+  correlationoptimizer1.q,\
+  count.q,\
+  create_merge_compressed.q,\
+  cross_join.q,\
+  cross_product_check_1.q,\
+  cross_product_check_2.q,\
+  ctas.q,\
+  custom_input_output_format.q,\
+  delete_all_non_partitioned.q,\
+  delete_all_partitioned.q,\
+  delete_orig_table.q,\
+  delete_tmp_table.q,\
+  delete_where_no_match.q,\
+  delete_where_non_partitioned.q,\
+  delete_where_partitioned.q,\
+  delete_whole_partition.q,\
+  disable_merge_for_bucketing.q,\
+  dynpart_sort_opt_vectorization.q,\
+  dynpart_sort_optimization.q,\
+  dynpart_sort_optimization2.q,\
+  enforce_order.q,\
+  filter_join_breaktask.q,\
+  filter_join_breaktask2.q,\
+  groupby1.q,\
+  groupby2.q,\
+  groupby3.q,\
+  having.q,\
+  identity_project_remove_skip.q,\
+  insert1.q,\
+  insert_into1.q,\
+  insert_into2.q,\
+  insert_orig_table.q,\
+  insert_values_dynamic_partitioned.q,\
+  insert_values_non_partitioned.q,\
+  insert_values_orig_table.q,\
+  insert_values_partitioned.q,\
+  insert_values_tmp_table.q,\
+  insert_update_delete.q,\
+  join0.q,\
+  join1.q,\
+  join_nullsafe.q,\
+  leftsemijoin.q,\
+  limit_pushdown.q,\
+  load_dyn_part1.q,\
+  load_dyn_part2.q,\
+  load_dyn_part3.q,\
+  mapjoin_mapjoin.q,\
+  mapreduce1.q,\
+  mapreduce2.q,\
+  merge1.q,\
+  merge2.q,\
+  mergejoin.q,\
+  metadataonly1.q,\
+  metadata_only_queries.q,\
+  metadata_only_queries_with_filters.q,\
+  nonmr_fetch_threshold.q,\
+  optimize_nullscan.q,\
+  orc_analyze.q,\
+  orc_merge1.q,\
+  orc_merge2.q,\
+  orc_merge3.q,\
+  orc_merge4.q,\
+  orc_merge5.q,\
+  orc_merge6.q,\
+  orc_merge7.q,\
+  orc_merge8.q,\
+  orc_merge9.q,\
+  orc_merge10.q,\
+  orc_merge11.q,\
+  orc_merge12.q,\
+  orc_merge_incompat1.q,\
+  orc_merge_incompat2.q,\
+  orc_merge_incompat3.q,\
+  orc_vectorization_ppd.q,\
+  parallel.q,\
+  ptf.q,\
+  ptf_matchpath.q,\
+  ptf_streaming.q,\
+  sample1.q,\
+  schema_evol_text_nonvec_mapwork_table.q,\
+  schema_evol_text_nonvec_fetchwork_table.q,\
+  schema_evol_orc_nonvec_fetchwork_part.q,\
+  schema_evol_orc_nonvec_mapwork_part.q,\
+  schema_evol_text_nonvec_fetchwork_part.q,\
+  schema_evol_text_nonvec_mapwork_part.q,\
+  schema_evol_orc_acid_mapwork_part.q,\
+  schema_evol_orc_acid_mapwork_table.q,\
+  schema_evol_orc_acidvec_mapwork_table.q,\
+  schema_evol_orc_acidvec_mapwork_part.q,\
+  schema_evol_orc_vec_mapwork_part.q,\
+  schema_evol_text_fetchwork_table.q,\
+  schema_evol_text_mapwork_table.q,\
+  schema_evol_orc_vec_mapwork_table.q,\
+  schema_evol_orc_nonvec_mapwork_table.q,\
+  schema_evol_orc_nonvec_fetchwork_table.q,\
+  selectDistinctStar.q,\
+  script_env_var1.q,\
+  script_env_var2.q,\
+  script_pipe.q,\
+  scriptfile1.q,\
+  select_dummy_source.q,\
+  skewjoin.q,\
+  stats_counter.q,\
+  stats_counter_partitioned.q,\
+  stats_noscan_1.q,\
+  stats_only_null.q,\
+  subquery_exists.q,\
+  subquery_in.q,\
+  temp_table.q,\
+  transform1.q,\
+  transform2.q,\
+  transform_ppr1.q,\
+  transform_ppr2.q,\
+  union2.q,\
+  union3.q,\
+  union4.q,\
+  union5.q,\
+  union6.q,\
+  union7.q,\
+  union8.q,\
+  union9.q,\
+  unionDistinct_1.q,\
+  unionDistinct_2.q,\
+  union_fast_stats.q,\
+  update_after_multiple_inserts.q,\
+  update_all_non_partitioned.q,\
+  update_all_partitioned.q,\
+  update_all_types.q,\
+  update_orig_table.q,\
+  update_tmp_table.q,\
+  update_where_no_match.q,\
+  update_where_non_partitioned.q,\
+  update_where_partitioned.q,\
+  update_two_cols.q,\
+  vector_acid3.q,\
+  vector_aggregate_9.q,\
+  vector_aggregate_without_gby.q,\
+  vector_auto_smb_mapjoin_14.q,\
+  vector_between_in.q,\
+  vector_between_columns.q,\
+  vector_binary_join_groupby.q,\
+  vector_bround.q,\
+  vector_bucket.q,\
+  vector_char_cast.q,\
+  vector_cast_constant.q,\
+  vector_char_2.q,\
+  vector_char_4.q,\
+  vector_char_mapjoin1.q,\
+  vector_char_simple.q,\
+  vector_coalesce.q,\
+  vector_coalesce_2.q,\
+  vector_complex_all.q,\
+  vector_count_distinct.q,\
+  vector_data_types.q,\
+  vector_date_1.q,\
+  vector_decimal_1.q,\
+  vector_decimal_10_0.q,\
+  vector_decimal_2.q,\
+  vector_decimal_3.q,\
+  vector_decimal_4.q,\
+  vector_decimal_5.q,\
+  vector_decimal_6.q,\
+  vector_decimal_aggregate.q,\
+  vector_decimal_cast.q,\
+  vector_decimal_expressions.q,\
+  vector_decimal_mapjoin.q,\
+  vector_decimal_math_funcs.q,\
+  vector_decimal_precision.q,\
+  vector_decimal_round.q,\
+  vector_decimal_round_2.q,\
+  vector_decimal_trailing.q,\
+  vector_decimal_udf.q,\
+  vector_decimal_udf2.q,\
+  vector_distinct_2.q,\
+  vector_elt.q,\
+  vector_groupby_3.q,\
+  vector_groupby_mapjoin.q,\
+  vector_groupby_reduce.q,\
+  vector_grouping_sets.q,\
+  vector_if_expr.q,\
+  vector_inner_join.q,\
+  vector_interval_1.q,\
+  vector_interval_2.q,\
+  vector_interval_mapjoin.q,\
+  vector_join30.q,\
+  vector_join_filters.q,\
+  vector_join_nulls.q,\
+  vector_left_outer_join.q,\
+  vector_left_outer_join2.q,\
+  vector_leftsemi_mapjoin.q,\
+  vector_mapjoin_reduce.q,\
+  vector_mr_diff_schema_alias.q,\
+  vector_multi_insert.q,\
+  vector_non_string_partition.q,\
+  vector_nullsafe_join.q,\
+  vector_null_projection.q,\
+  vector_nvl.q,\
+  vector_orderby_5.q,\
+  vector_outer_join0.q,\
+  vector_outer_join1.q,\
+  vector_outer_join2.q,\
+  vector_outer_join3.q,\
+  vector_outer_join4.q,\
+  vector_outer_join5.q,\
+  vector_outer_join6.q,\
+  vector_partition_diff_num_cols.q,\
+  vector_partitioned_date_time.q,\
+  vector_reduce_groupby_decimal.q,\
+  vector_reduce1.q,\
+  vector_reduce2.q,\
+  vector_reduce3.q,\
+  vector_string_concat.q,\
+  vector_struct_in.q,\
+  vector_varchar_4.q,\
+  vector_varchar_mapjoin1.q,\
+  vector_varchar_simple.q,\
+  vector_when_case_null.q,\
+  vectorization_0.q,\
+  vectorization_1.q,\
+  vectorization_10.q,\
+  vectorization_11.q,\
+  vectorization_12.q,\
+  vectorization_13.q,\
+  vectorization_14.q,\
+  vectorization_15.q,\
+  vectorization_16.q,\
+  vectorization_17.q,\
+  vectorization_2.q,\
+  vectorization_3.q,\
+  vectorization_4.q,\
+  vectorization_5.q,\
+  vectorization_6.q,\
+  vectorization_7.q,\
+  vectorization_8.q,\
+  vectorization_9.q,\
+  vectorization_decimal_date.q,\
+  vectorization_div0.q,\
+  vectorization_limit.q,\
+  vectorization_nested_udf.q,\
+  vectorization_not.q,\
+  vectorization_part.q,\
+  vectorization_part_project.q,\
+  vectorization_part_varchar.q,\
+  vectorization_pushdown.q,\
+  vectorization_short_regress.q,\
+  vectorized_bucketmapjoin1.q,\
+  vectorized_case.q,\
+  vectorized_casts.q,\
+  vectorized_context.q,\
+  vectorized_date_funcs.q,\
+  vectorized_distinct_gby.q,\
+  vectorized_mapjoin.q,\
+  vectorized_math_funcs.q,\
+  vectorized_nested_mapjoin.q,\
+  vectorized_parquet.q,\
+  vectorized_parquet_types.q,\
+  vectorized_ptf.q,\
+  vectorized_rcfile_columnar.q,\
+  vectorized_shufflejoin.q,\
+  vectorized_string_funcs.q,\
+  vectorized_timestamp_funcs.q,\
+  vectorized_timestamp_ints_casts.q,\
+  auto_sortmerge_join_1.q,\
+  auto_sortmerge_join_10.q,\
+  auto_sortmerge_join_11.q,\
+  auto_sortmerge_join_12.q,\
+  auto_sortmerge_join_13.q,\
+  auto_sortmerge_join_14.q,\
+  auto_sortmerge_join_15.q,\
+  auto_sortmerge_join_16.q,\
+  auto_sortmerge_join_2.q,\
+  auto_sortmerge_join_3.q,\
+  auto_sortmerge_join_4.q,\
+  auto_sortmerge_join_5.q,\
+  auto_sortmerge_join_6.q,\
+  auto_sortmerge_join_7.q,\
+  auto_sortmerge_join_8.q,\
+  auto_sortmerge_join_9.q,\
+  auto_join30.q,\
+  auto_join21.q,\
+  auto_join29.q,\
+  auto_join_filters.q,\
+  auto_join_nulls.q,\
+  union_type_chk.q
+
+
+minitez.query.files=bucket_map_join_tez1.q,\
+  smb_cache.q,\
+  bucket_map_join_tez2.q,\
+  constprog_dpp.q,\
+  dynamic_partition_pruning.q,\
+  dynamic_partition_pruning_2.q,\
+  bucketpruning1.q,\
+  explainuser_1.q,\
+  explainuser_2.q,\
+  explainuser_3.q,\
+  hybridgrace_hashjoin_1.q,\
+  hybridgrace_hashjoin_2.q,\
+  mapjoin_decimal.q,\
+  mergejoin_3way.q,\
+  lvj_mapjoin.q,\
+  llapdecider.q,\
+  mrr.q,\
+  orc_ppd_basic.q,\
+  orc_merge_diff_fs.q,\
+  tez_bmj_schema_evolution.q,\
+  tez_dml.q,\
+  tez_fsstat.q,\
+  tez_insert_overwrite_local_directory_1.q,\
+  tez_dynpart_hashjoin_1.q,\
+  tez_dynpart_hashjoin_2.q,\
+  tez_dynpart_hashjoin_3.q,\
+  tez_vector_dynpart_hashjoin_1.q,\
+  tez_vector_dynpart_hashjoin_2.q,\
+  tez_join_hash.q,\
+  tez_join_result_complex.q,\
+  tez_join_tests.q,\
+  tez_joins_explain.q,\
+  tez_schema_evolution.q,\
+  tez_self_join.q,\
+  tez_union.q,\
+  tez_union2.q,\
+  tez_union_dynamic_partition.q,\
+  tez_union_view.q,\
+  tez_union_with_udf.q,\
+  tez_union_decimal.q,\
+  tez_union_group_by.q,\
+  tez_smb_main.q,\
+  tez_smb_1.q,\
+  tez_smb_empty.q,\
+  vector_join_part_col_char.q,\
+  vectorized_dynamic_partition_pruning.q,\
+  tez_multi_union.q,\
+  tez_join.q,\
+  tez_union_multiinsert.q,\
+  windowing_gby.q
+
+
+
+
+minillap.query.files=bucket_map_join_tez1.q,\
+  bucket_map_join_tez2.q,\
+  constprog_dpp.q,\
+  dynamic_partition_pruning.q,\
+  dynamic_partition_pruning_2.q,\
+  hybridgrace_hashjoin_1.q,\
+  hybridgrace_hashjoin_2.q,\
+  mapjoin_decimal.q,\
+  lvj_mapjoin.q,\
+  llapdecider.q,\
+  mrr.q,\
+  orc_ppd_basic.q,\
+  tez_bmj_schema_evolution.q,\
+  tez_dml.q,\
+  tez_fsstat.q,\
+  tez_insert_overwrite_local_directory_1.q,\
+  tez_dynpart_hashjoin_1.q,\
+  tez_dynpart_hashjoin_2.q,\
+  tez_vector_dynpart_hashjoin_1.q,\
+  tez_vector_dynpart_hashjoin_2.q,\
+  tez_join_hash.q,\
+  tez_join_result_complex.q,\
+  tez_join_tests.q,\
+  tez_joins_explain.q,\
+  tez_schema_evolution.q,\
+  tez_self_join.q,\
+  tez_union.q,\
+  tez_union2.q,\
+  tez_union_dynamic_partition.q,\
+  tez_union_view.q,\
+  tez_union_decimal.q,\
+  tez_union_group_by.q,\
+  tez_smb_main.q,\
+  tez_smb_1.q,\
+  vector_join_part_col_char.q,\
+  vectorized_dynamic_partition_pruning.q,\
+  tez_multi_union.q,\
+  tez_join.q,\
+  tez_union_multiinsert.q
+
+encrypted.query.files=encryption_join_unencrypted_tbl.q,\
+  encryption_insert_partition_static.q,\
+  encryption_insert_partition_dynamic.q,\
+  encryption_join_with_different_encryption_keys.q,\
+  encryption_select_read_only_encrypted_tbl.q,\
+  encryption_select_read_only_unencrypted_tbl.q,\
+  encryption_load_data_to_encrypted_tables.q, \
+  encryption_unencrypted_nonhdfs_external_tables.q, \
+  encryption_move_tbl.q, \
+  encryption_drop_table.q, \
+  encryption_insert_values.q, \
+  encryption_drop_view.q, \
+  encryption_drop_partition.q, \
+  encryption_with_trash.q
+
+beeline.positive.exclude=add_part_exist.q,\
+  alter1.q,\
+  alter2.q,\
+  alter4.q,\
+  alter5.q,\
+  alter_rename_partition.q,\
+  alter_rename_partition_authorization.q,\
+  archive.q,\
+  archive_corrupt.q,\
+  archive_mr_1806.q,\
+  archive_multi.q,\
+  archive_multi_mr_1806.q,\
+  authorization_1.q,\
+  authorization_2.q,\
+  authorization_4.q,\
+  authorization_5.q,\
+  authorization_6.q,\
+  authorization_7.q,\
+  ba_table1.q,\
+  ba_table2.q,\
+  ba_table3.q,\
+  ba_table_udfs.q,\
+  binary_table_bincolserde.q,\
+  binary_table_colserde.q,\
+  cluster.q,\
+  columnarserde_create_shortcut.q,\
+  combine2.q,\
+  constant_prop.q,\
+  create_nested_type.q,\
+  create_or_replace_view.q,\
+  create_struct_table.q,\
+  create_union_table.q,\
+  database.q,\
+  database_location.q,\
+  database_properties.q,\
+  describe_database_json.q,\
+  drop_database_removes_partition_dirs.q,\
+  escape1.q,\
+  escape2.q,\
+  exim_00_nonpart_empty.q,\
+  exim_01_nonpart.q,\
+  exim_02_00_part_empty.q,\
+  exim_02_part.q,\
+  exim_03_nonpart_over_compat.q,\
+  exim_04_all_part.q,\
+  exim_04_evolved_parts.q,\
+  exim_05_some_part.q,\
+  exim_06_one_part.q,\
+  exim_07_all_part_over_nonoverlap.q,\
+  exim_08_nonpart_rename.q,\
+  exim_09_part_spec_nonoverlap.q,\
+  exim_10_external_managed.q,\
+  exim_11_managed_external.q,\
+  exim_12_external_location.q,\
+  exim_13_managed_location.q,\
+  exim_14_managed_location_over_existing.q,\
+  exim_15_external_part.q,\
+  exim_16_part_external.q,\
+  exim_17_part_managed.q,\
+  exim_18_part_external.q,\
+  exim_19_00_part_external_location.q,\
+  exim_19_part_external_location.q,\
+  exim_20_part_managed_location.q,\
+  exim_21_export_authsuccess.q,\
+  exim_22_import_exist_authsuccess.q,\
+  exim_23_import_part_authsuccess.q,\
+  exim_24_import_nonexist_authsuccess.q,\
+  global_limit.q,\
+  groupby_complex_types.q,\
+  groupby_complex_types_multi_single_reducer.q,\
+  index_auth.q,\
+  index_auto.q,\
+  index_auto_empty.q,\
+  index_bitmap.q,\
+  index_bitmap1.q,\
+  index_bitmap2.q,\
+  index_bitmap3.q,\
+  index_bitmap_auto.q,\
+  index_bitmap_rc.q,\
+  index_compact.q,\
+  index_compact_1.q,\
+  index_compact_2.q,\
+  index_compact_3.q,\
+  index_stale_partitioned.q,\
+  init_file.q,\
+  input16.q,\
+  input16_cc.q,\
+  input46.q,\
+  input_columnarserde.q,\
+  input_dynamicserde.q,\
+  input_lazyserde.q,\
+  input_testxpath3.q,\
+  input_testxpath4.q,\
+  insert2_overwrite_partitions.q,\
+  insertexternal1.q,\
+  join_thrift.q,\
+  lateral_view.q,\
+  load_binary_data.q,\
+  load_exist_part_authsuccess.q,\
+  load_nonpart_authsuccess.q,\
+  load_part_authsuccess.q,\
+  loadpart_err.q,\
+  lock1.q,\
+  lock2.q,\
+  lock3.q,\
+  lock4.q,\
+  merge_dynamic_partition.q,\
+  multi_insert.q,\
+  multi_insert_move_tasks_share_dependencies.q,\
+  null_column.q,\
+  ppd_clusterby.q,\
+  query_with_semi.q,\
+  rename_column.q,\
+  sample6.q,\
+  sample_islocalmode_hook.q,\
+  set_processor_namespaces.q,\
+  show_tables.q,\
+  source.q,\
+  split_sample.q,\
+  str_to_map.q,\
+  transform1.q,\
+  udaf_collect_set.q,\
+  udaf_context_ngrams.q,\
+  udaf_histogram_numeric.q,\
+  udaf_ngrams.q,\
+  udaf_percentile_approx.q,\
+  udf_array.q,\
+  udf_bitmap_and.q,\
+  udf_bitmap_or.q,\
+  udf_explode.q,\
+  udf_format_number.q,\
+  udf_map.q,\
+  udf_map_keys.q,\
+  udf_map_values.q,\
+  udf_max.q,\
+  udf_min.q,\
+  udf_named_struct.q,\
+  udf_percentile.q,\
+  udf_printf.q,\
+  udf_sentences.q,\
+  udf_sort_array.q,\
+  udf_split.q,\
+  udf_struct.q,\
+  udf_substr.q,\
+  udf_translate.q,\
+  udf_union.q,\
+  udf_xpath.q,\
+  udtf_stack.q,\
+  view.q,\
+  virtual_column.q
+
+minimr.query.negative.files=cluster_tasklog_retrieval.q,\
+  file_with_header_footer_negative.q,\
+  local_mapred_error_cache.q,\
+  mapreduce_stack_trace.q,\
+  mapreduce_stack_trace_hadoop20.q,\
+  mapreduce_stack_trace_turnoff.q,\
+  mapreduce_stack_trace_turnoff_hadoop20.q,\
+  minimr_broken_pipe.q,\
+  table_nonprintable_negative.q,\
+  udf_local_resource.q
+
+# tests are kept sorted; to regenerate the sorted list use: perl -pe 's@\\\s*\n@ @g' testconfiguration.properties \
+# | awk -F= '/spark.query.files/{print $2}' | perl -pe 's@.q *, *@\n@g' \
+# | egrep -v '^ *$' |  sort -V | uniq | perl -pe 's@\n@.q, \\\n@g' | perl -pe 's@^@  @g'
+spark.query.files=add_part_multiple.q, \
+  alter_merge_orc.q, \
+  alter_merge_stats_orc.q, \
+  annotate_stats_join.q, \
+  auto_join0.q, \
+  auto_join1.q, \
+  auto_join10.q, \
+  auto_join11.q, \
+  auto_join12.q, \
+  auto_join13.q, \
+  auto_join14.q, \
+  auto_join15.q, \
+  auto_join16.q, \
+  auto_join17.q, \
+  auto_join18.q, \
+  auto_join18_multi_distinct.q, \
+  auto_join19.q, \
+  auto_join2.q, \
+  auto_join20.q, \
+  auto_join21.q, \
+  auto_join22.q, \
+  auto_join23.q, \
+  auto_join24.q, \
+  auto_join26.q, \
+  auto_join27.q, \
+  auto_join28.q, \
+  auto_join29.q, \
+  auto_join3.q, \
+  auto_join30.q, \
+  auto_join31.q, \
+  auto_join32.q, \
+  auto_join4.q, \
+  auto_join5.q, \
+  auto_join6.q, \
+  auto_join7.q, \
+  auto_join8.q, \
+  auto_join9.q, \
+  auto_join_filters.q, \
+  auto_join_nulls.q, \
+  auto_join_reordering_values.q, \
+  auto_join_stats.q, \
+  auto_join_stats2.q, \
+  auto_join_without_localtask.q, \
+  auto_smb_mapjoin_14.q, \
+  auto_sortmerge_join_1.q, \
+  auto_sortmerge_join_10.q, \
+  auto_sortmerge_join_12.q, \
+  auto_sortmerge_join_13.q, \
+  auto_sortmerge_join_14.q, \
+  auto_sortmerge_join_15.q, \
+  auto_sortmerge_join_16.q, \
+  auto_sortmerge_join_2.q, \
+  auto_sortmerge_join_3.q, \
+  auto_sortmerge_join_4.q, \
+  auto_sortmerge_join_5.q, \
+  auto_sortmerge_join_6.q, \
+  auto_sortmerge_join_7.q, \
+  auto_sortmerge_join_8.q, \
+  auto_sortmerge_join_9.q, \
+  avro_compression_enabled_native.q, \
+  avro_decimal_native.q, \
+  avro_joins.q, \
+  avro_joins_native.q, \
+  bucket2.q, \
+  bucket3.q, \
+  bucket4.q, \
+  bucket_map_join_1.q, \
+  bucket_map_join_2.q, \
+  bucket_map_join_spark1.q, \
+  bucket_map_join_spark2.q, \
+  bucket_map_join_spark3.q, \
+  bucket_map_join_spark4.q, \
+  bucket_map_join_tez1.q, \
+  bucket_map_join_tez2.q, \
+  bucketmapjoin1.q, \
+  bucketmapjoin10.q, \
+  bucketmapjoin11.q, \
+  bucketmapjoin12.q, \
+  bucketmapjoin13.q, \
+  bucketmapjoin2.q, \
+  bucketmapjoin3.q, \
+  bucketmapjoin4.q, \
+  bucketmapjoin5.q, \
+  bucketmapjoin7.q, \
+  bucketmapjoin8.q, \
+  bucketmapjoin9.q, \
+  bucketmapjoin_negative.q, \
+  bucketmapjoin_negative2.q, \
+  bucketmapjoin_negative3.q, \
+  bucketsortoptimize_insert_2.q, \
+  bucketsortoptimize_insert_4.q, \
+  bucketsortoptimize_insert_6.q, \
+  bucketsortoptimize_insert_7.q, \
+  bucketsortoptimize_insert_8.q, \
+  cbo_gby.q, \
+  cbo_gby_empty.q, \
+  cbo_limit.q, \
+  cbo_semijoin.q, \
+  cbo_simple_select.q, \
+  cbo_stats.q, \
+  cbo_subq_in.q, \
+  cbo_subq_not_in.q, \
+  cbo_udf_udaf.q, \
+  cbo_union.q, \
+  column_access_stats.q, \
+  count.q, \
+  create_merge_compressed.q, \
+  cross_join.q, \
+  cross_product_check_1.q, \
+  cross_product_check_2.q, \
+  ctas.q, \
+  custom_input_output_format.q, \
+  date_join1.q, \
+  date_udf.q, \
+  decimal_1_1.q, \
+  decimal_join.q, \
+  disable_merge_for_bucketing.q, \
+  dynamic_rdd_cache.q, \
+  enforce_order.q, \
+  escape_clusterby1.q, \
+  escape_distributeby1.q, \
+  escape_orderby1.q, \
+  escape_sortby1.q, \
+  filter_join_breaktask.q, \
+  filter_join_breaktask2.q, \
+  groupby1.q, \
+  groupby10.q, \
+  groupby11.q, \
+  groupby1_map.q, \
+  groupby1_map_nomap.q, \
+  groupby1_map_skew.q, \
+  groupby1_noskew.q, \
+  groupby2.q, \
+  groupby2_map.q, \
+  groupby2_map_multi_distinct.q, \
+  groupby2_map_skew.q, \
+  groupby2_noskew.q, \
+  groupby2_noskew_multi_distinct.q, \
+  groupby3.q, \
+  groupby3_map.q, \
+  groupby3_map_multi_distinct.q, \
+  groupby3_map_skew.q, \
+  groupby3_noskew.q, \
+  groupby3_noskew_multi_distinct.q, \
+  groupby4.q, \
+  groupby4_map.q, \
+  groupby4_map_skew.q, \
+  groupby4_noskew.q, \
+  groupby5.q, \
+  groupby5_map.q, \
+  groupby5_map_skew.q, \
+  groupby5_noskew.q, \
+  groupby6.q, \
+  groupby6_map.q, \
+  groupby6_map_skew.q, \
+  groupby6_noskew.q, \
+  groupby7.q, \
+  groupby7_map.q, \
+  groupby7_map_multi_single_reducer.q, \
+  groupby7_map_skew.q, \
+  groupby7_noskew.q, \
+  groupby7_noskew_multi_single_reducer.q, \
+  groupby8.q, \
+  groupby8_map.q, \
+  groupby8_map_skew.q, \
+  groupby8_noskew.q, \
+  groupby9.q, \
+  groupby_bigdata.q, \
+  groupby_complex_types.q, \
+  groupby_complex_types_multi_single_reducer.q, \
+  groupby_cube1.q, \
+  groupby_grouping_id2.q, \
+  groupby_map_ppr.q, \
+  groupby_map_ppr_multi_distinct.q, \
+  groupby_multi_insert_common_distinct.q, \
+  groupby_multi_single_reducer.q, \
+  groupby_multi_single_reducer2.q, \
+  groupby_multi_single_reducer3.q, \
+  groupby_position.q, \
+  groupby_ppr.q, \
+  groupby_ppr_multi_distinct.q, \
+  groupby_resolution.q, \
+  groupby_rollup1.q, \
+  groupby_sort_1_23.q, \
+  groupby_sort_skew_1.q, \
+  groupby_sort_skew_1_23.q, \
+  having.q, \
+  identity_project_remove_skip.q, \
+  index_auto_self_join.q, \
+  innerjoin.q, \
+  input12.q, \
+  input13.q, \
+  input14.q, \
+  input17.q, \
+  input18.q, \
+  input1_limit.q, \
+  input_part2.q, \
+  insert1.q, \
+  insert_into1.q, \
+  insert_into2.q, \
+  insert_into3.q, \
+  join0.q, \
+  join1.q, \
+  join10.q, \
+  join11.q, \
+  join12.q, \
+  join13.q, \
+  join14.q, \
+  join15.q, \
+  join16.q, \
+  join17.q, \
+  join18.q, \
+  join18_multi_distinct.q, \
+  join19.q, \
+  join2.q, \
+  join20.q, \
+  join21.q, \
+  join22.q, \
+  join23.q, \
+  join24.q, \
+  join25.q, \
+  join26.q, \
+  join27.q, \
+  join28.q, \
+  join29.q, \
+  join3.q, \
+  join30.q, \
+  join31.q, \
+  join32.q, \
+  join32_lessSize.q, \
+  join33.q, \
+  join34.q, \
+  join35.q, \
+  join36.q, \
+  join37.q, \
+  join38.q, \
+  join39.q, \
+  join4.q, \
+  join40.q, \
+  join41.q, \
+  join5.q, \
+  join6.q, \
+  join7.q, \
+  join8.q, \
+  join9.q, \
+  join_1to1.q, \
+  join_alt_syntax.q, \
+  join_array.q, \
+  join_casesensitive.q, \
+  join_cond_pushdown_1.q, \
+  join_cond_pushdown_2.q, \
+  join_cond_pushdown_3.q, \
+  join_cond_pushdown_4.q, \
+  join_cond_pushdown_unqual1.q, \
+  join_cond_pushdown_unqual2.q, \
+  join_cond_pushdown_unqual3.q, \
+  join_cond_pushdown_unqual4.q, \
+  join_empty.q, \
+  join_filters_overlap.q, \
+  join_hive_626.q, \
+  join_literals.q, \
+  join_map_ppr.q, \
+  join_merge_multi_expressions.q, \
+  join_merging.q, \
+  join_nullsafe.q, \
+  join_rc.q, \
+  join_reorder.q, \
+  join_reorder2.q, \
+  join_reorder3.q, \
+  join_reorder4.q, \
+  join_star.q, \
+  join_thrift.q, \
+  join_vc.q, \
+  join_view.q, \
+  lateral_view_explode2.q, \
+  leftsemijoin.q, \
+  leftsemijoin_mr.q, \
+  limit_partition_metadataonly.q, \
+  limit_pushdown.q, \
+  list_bucket_dml_2.q, \
+  load_dyn_part1.q, \
+  load_dyn_part10.q, \
+  load_dyn_part11.q, \
+  load_dyn_part12.q, \
+  load_dyn_part13.q, \
+  load_dyn_part14.q, \
+  load_dyn_part15.q, \
+  load_dyn_part2.q, \
+  load_dyn_part3.q, \
+  load_dyn_part4.q, \
+  load_dyn_part5.q, \
+  load_dyn_part6.q, \
+  load_dyn_part7.q, \
+  load_dyn_part8.q, \
+  load_dyn_part9.q, \
+  louter_join_ppr.q, \
+  mapjoin1.q, \
+  mapjoin_addjar.q, \
+  mapjoin_decimal.q, \
+  mapjoin_distinct.q, \
+  mapjoin_filter_on_outerjoin.q, \
+  mapjoin_mapjoin.q, \
+  mapjoin_memcheck.q, \
+  mapjoin_subquery.q, \
+  mapjoin_subquery2.q, \
+  mapjoin_test_outer.q, \
+  mapreduce1.q, \
+  mapreduce2.q, \
+  merge1.q, \
+  merge2.q, \
+  mergejoins.q, \
+  mergejoins_mixed.q, \
+  metadata_only_queries.q, \
+  metadata_only_queries_with_filters.q, \
+  multi_insert.q, \
+  multi_insert_gby.q, \
+  multi_insert_gby2.q, \
+  multi_insert_gby3.q, \
+  multi_insert_lateral_view.q, \
+  multi_insert_mixed.q, \
+  multi_insert_move_tasks_share_dependencies.q, \
+  multi_join_union.q, \
+  multi_join_union_src.q, \
+  multigroupby_singlemr.q, \
+  nullgroup.q, \
+  nullgroup2.q, \
+  nullgroup4.q, \
+  nullgroup4_multi_distinct.q, \
+  optimize_nullscan.q, \
+  order.q, \
+  order2.q, \
+  outer_join_ppr.q, \
+  parallel.q, \
+  parallel_join0.q, \
+  parallel_join1.q, \
+  parquet_join.q, \
+  pcr.q, \
+  ppd_gby_join.q, \
+  ppd_join.q, \
+  ppd_join2.q, \
+  ppd_join3.q, \
+  ppd_join4.q, \
+  ppd_join5.q, \
+  ppd_join_filter.q, \
+  ppd_multi_insert.q, \
+  ppd_outer_join1.q, \
+  ppd_outer_join2.q, \
+  ppd_outer_join3.q, \
+  ppd_outer_join4.q, \
+  ppd_outer_join5.q, \
+  ppd_transform.q, \
+  ptf.q, \
+  ptf_decimal.q, \
+  ptf_general_queries.q, \
+  ptf_matchpath.q, \
+  ptf_rcfile.q, \
+  ptf_register_tblfn.q, \
+  ptf_seqfile.q, \
+  ptf_streaming.q, \
+  rcfile_bigdata.q, \
+  reduce_deduplicate_exclude_join.q, \
+  router_join_ppr.q, \
+  runtime_skewjoin_mapjoin_spark.q, \
+  sample1.q, \
+  sample10.q, \
+  sample2.q, \
+  sample3.q, \
+  sample4.q, \
+  sample5.q, \
+  sample6.q, \
+  sample7.q, \
+  sample8.q, \
+  sample9.q, \
+  script_env_var1.q, \
+  script_env_var2.q, \
+  script_pipe.q, \
+  scriptfile1.q, \
+  semijoin.q, \
+  skewjoin.q, \
+  skewjoin_noskew.q, \
+  skewjoin_union_remove_1.q, \
+  skewjoin_union_remove_2.q, \
+  skewjoinopt1.q, \
+  skewjoinopt10.q, \
+  skewjoinopt11.q, \
+  skewjoinopt12.q, \
+  skewjoinopt13.q, \
+  skewjoinopt14.q, \
+  skewjoinopt15.q, \
+  skewjoinopt16.q, \
+  skewjoinopt17.q, \
+  skewjoinopt18.q, \
+  skewjoinopt19.q, \
+  skewjoinopt2.q, \
+  skewjoinopt20.q, \
+  skewjoinopt3.q, \
+  skewjoinopt4.q, \
+  skewjoinopt5.q, \
+  skewjoinopt6.q, \
+  skewjoinopt7.q, \
+  skewjoinopt8.q, \
+  skewjoinopt9.q, \
+  smb_mapjoin_1.q, \
+  smb_mapjoin_10.q, \
+  smb_mapjoin_11.q, \
+  smb_mapjoin_12.q, \
+  smb_mapjoin_13.q, \
+  smb_mapjoin_14.q, \
+  smb_mapjoin_15.q, \
+  smb_mapjoin_16.q, \
+  smb_mapjoin_17.q, \
+  smb_mapjoin_18.q, \
+  smb_mapjoin_19.q, \
+  smb_mapjoin_2.q, \
+  smb_mapjoin_20.q, \
+  smb_mapjoin_21.q, \
+  smb_mapjoin_22.q, \
+  smb_mapjoin_25.q, \
+  smb_mapjoin_3.q, \
+  smb_mapjoin_4.q, \
+  smb_mapjoin_5.q, \
+  smb_mapjoin_6.q, \
+  smb_mapjoin_7.q, \
+  smb_mapjoin_8.q, \
+  smb_mapjoin_9.q, \
+  sort.q, \
+  stats0.q, \
+  stats1.q, \
+  stats10.q, \
+  stats12.q, \
+  stats13.q, \
+  stats14.q, \
+  stats15.q, \
+  stats16.q, \
+  stats18.q, \
+  stats2.q, \
+  stats20.q, \
+  stats3.q, \
+  stats5.q, \
+  stats6.q, \
+  stats7.q, \
+  stats8.q, \
+  stats9.q, \
+  stats_counter.q, \
+  stats_counter_partitioned.q, \
+  stats_noscan_1.q, \
+  stats_noscan_2.q, \
+  stats_only_null.q, \
+  stats_partscan_1_23.q, \
+  statsfs.q, \
+  subquery_exists.q, \
+  subquery_in.q, \
+  subquery_multiinsert.q, \
+  table_access_keys_stats.q, \
+  temp_table.q, \
+  temp_table_gb1.q, \
+  temp_table_join1.q, \
+  tez_join_tests.q, \
+  tez_joins_explain.q, \
+  timestamp_1.q, \
+  timestamp_2.q, \
+  timestamp_3.q, \
+  timestamp_comparison.q, \
+  timestamp_lazy.q, \
+  timestamp_null.q, \
+  timestamp_udf.q, \
+  transform1.q, \
+  transform2.q, \
+  transform_ppr1.q, \
+  transform_ppr2.q, \
+  udaf_collect_set.q, \
+  udf_example_add.q, \
+  udf_in_file.q, \
+  udf_max.q, \
+  udf_min.q, \
+  udf_percentile.q, \
+  union.q, \
+  union10.q, \
+  union11.q, \
+  union12.q, \
+  union13.q, \
+  union14.q, \
+  union15.q, \
+  union16.q, \
+  union17.q, \
+  union18.q, \
+  union19.q, \
+  union2.q, \
+  union20.q, \
+  union21.q, \
+  union22.q, \
+  union23.q, \
+  union24.q, \
+  union25.q, \
+  union26.q, \
+  union27.q, \
+  union28.q, \
+  union29.q, \
+  union3.q, \
+  union30.q, \
+  union31.q, \
+  union32.q, \
+  union33.q, \
+  union34.q, \
+  union4.q, \
+  union5.q, \
+  union6.q, \
+  union7.q, \
+  union8.q, \
+  union9.q, \
+  union_date.q, \
+  union_date_trim.q, \
+  union_lateralview.q, \
+  union_null.q, \
+  union_ppr.q, \
+  union_remove_1.q, \
+  union_remove_10.q, \
+  union_remove_11.q, \
+  union_remove_12.q, \
+  union_remove_13.q, \
+  union_remove_14.q, \
+  union_remove_15.q, \
+  union_remove_16.q, \
+  union_remove_17.q, \
+  union_remove_18.q, \
+  union_remove_19.q, \
+  union_remove_2.q, \
+  union_remove_20.q, \
+  union_remove_21.q, \
+  union_remove_22.q, \
+  union_remove_23.q, \
+  union_remove_24.q, \
+  union_remove_25.q, \
+  union_remove_3.q, \
+  union_remove_4.q, \
+  union_remove_5.q, \
+  union_remove_6.q, \
+  union_remove_6_subq.q, \
+  union_remove_7.q, \
+  union_remove_8.q, \
+  union_remove_9.q, \
+  union_script.q, \
+  union_top_level.q, \
+  union_view.q, \
+  uniquejoin.q, \
+  varchar_join1.q, \
+  vector_between_in.q, \
+  vector_cast_constant.q, \
+  vector_char_4.q, \
+  vector_count_distinct.q, \
+  vector_data_types.q, \
+  vector_decimal_aggregate.q, \
+  vector_decimal_mapjoin.q, \
+  vector_distinct_2.q, \
+  vector_elt.q, \
+  vector_groupby_3.q, \
+  vector_left_outer_join.q, \
+  vector_mapjoin_reduce.q, \
+  vector_orderby_5.q, \
+  vector_string_concat.q, \
+  vector_varchar_4.q, \
+  vectorization_0.q, \
+  vectorization_1.q, \
+  vectorization_10.q, \
+  vectorization_11.q, \
+  vectorization_12.q, \
+  vectorization_13.q, \
+  vectorization_14.q, \
+  vectorization_15.q, \
+  vectorization_16.q, \
+  vectorization_17.q, \
+  vectorization_2.q, \
+  vectorization_3.q, \
+  vectorization_4.q, \
+  vectorization_5.q, \
+  vectorization_6.q, \
+  vectorization_9.q, \
+  vectorization_decimal_date.q, \
+  vectorization_div0.q, \
+  vectorization_nested_udf.q, \
+  vectorization_not.q, \
+  vectorization_part.q, \
+  vectorization_part_project.q, \
+  vectorization_pushdown.q, \
+  vectorization_short_regress.q, \
+  vectorized_case.q, \
+  vectorized_mapjoin.q, \
+  vectorized_math_funcs.q, \
+  vectorized_nested_mapjoin.q, \
+  vectorized_ptf.q, \
+  vectorized_rcfile_columnar.q, \
+  vectorized_shufflejoin.q, \
+  vectorized_string_funcs.q, \
+  vectorized_timestamp_funcs.q, \
+  windowing.q
+
+# Unlike "spark.query.files" above, these tests only run
+# under Spark engine.
+spark.only.query.files=spark_dynamic_partition_pruning.q,\
+  spark_dynamic_partition_pruning_2.q,\
+  spark_vectorized_dynamic_partition_pruning.q
+
+miniSparkOnYarn.query.files=auto_sortmerge_join_16.q,\
+  bucket4.q,\
+  bucket5.q,\
+  bucket6.q,\
+  bucketizedhiveinputformat.q,\
+  bucketmapjoin6.q,\
+  bucketmapjoin7.q,\
+  constprog_partitioner.q,\
+  disable_merge_for_bucketing.q,\
+  empty_dir_in_table.q,\
+  external_table_with_space_in_location_path.q,\
+  file_with_header_footer.q,\
+  import_exported_table.q,\
+  index_bitmap3.q,\
+  index_bitmap_auto.q,\
+  infer_bucket_sort_bucketed_table.q,\
+  infer_bucket_sort_map_operators.q,\
+  infer_bucket_sort_merge.q,\
+  infer_bucket_sort_num_buckets.q,\
+  infer_bucket_sort_reducers_power_two.q,\
+  input16_cc.q,\
+  leftsemijoin_mr.q,\
+  list_bucket_dml_10.q,\
+  load_fs2.q,\
+  load_hdfs_file_with_space_in_the_name.q,\
+  optrstat_groupby.q,\
+  orc_merge1.q,\
+  orc_merge2.q,\
+  orc_merge3.q,\
+  orc_merge4.q,\
+  orc_merge5.q,\
+  orc_merge6.q,\
+  orc_merge7.q,\
+  orc_merge8.q,\
+  orc_merge9.q,\
+  orc_merge_diff_fs.q,\
+  orc_merge_incompat1.q,\
+  orc_merge_incompat2.q,\
+  parallel_orderby.q,\
+  quotedid_smb.q,\
+  reduce_deduplicate.q,\
+  remote_script.q,\
+  root_dir_external_table.q,\
+  schemeAuthority.q,\
+  schemeAuthority2.q,\
+  scriptfile1.q,\
+  scriptfile1_win.q,\
+  stats_counter.q,\
+  stats_counter_partitioned.q,\
+  temp_table_external.q,\
+  truncate_column_buckets.q,\
+  uber_reduce.q,\
+  vector_inner_join.q,\
+  vector_outer_join0.q,\
+  vector_outer_join1.q,\
+  vector_outer_join2.q,\
+  vector_outer_join3.q,\
+  vector_outer_join4.q,\
+  vector_outer_join5.q
+
+# These tests are removed from miniSparkOnYarn.query.files
+#  ql_rewrite_gbtoidx.q,\
+#  ql_rewrite_gbtoidx_cbo_1.q,\
+#  smb_mapjoin_8.q,\
+
+
+spark.query.negative.files=groupby2_map_skew_multi_distinct.q,\
+  groupby2_multi_distinct.q,\
+  groupby3_map_skew_multi_distinct.q,\
+  groupby3_multi_distinct.q,\
+  groupby_grouping_sets7.q

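A note on the list format above: the *.query.files values rely on java.util.Properties line continuation, where a trailing backslash folds the next line into the same value, so the commas are the only real separators and a missing comma silently merges two adjacent test names into one entry. A minimal JDK-only sketch of reading one of the lists (path and key are taken from the diff above):

import java.io.FileReader;
import java.util.Properties;

public class QueryFileList {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    // load() folds trailing-backslash continuations into single values.
    try (FileReader in = new FileReader(
        "itests/src/test/resources/testconfiguration.properties.orig")) {
      props.load(in);
    }
    String files = props.getProperty("minitez.query.files.shared", "");
    // Split on commas; a merged entry would surface here as one malformed name.
    for (String name : files.split(",")) {
      System.out.println(name.trim());
    }
  }
}
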
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/orc/src/java/org/apache/orc/impl/WriterImpl.java
----------------------------------------------------------------------
diff --git a/orc/src/java/org/apache/orc/impl/WriterImpl.java b/orc/src/java/org/apache/orc/impl/WriterImpl.java
index 7eaa761..f8afe06 100644
--- a/orc/src/java/org/apache/orc/impl/WriterImpl.java
+++ b/orc/src/java/org/apache/orc/impl/WriterImpl.java
@@ -1735,19 +1735,17 @@ public class WriterImpl implements Writer, MemoryManager.Callback {
                     int length) throws IOException {
       super.writeBatch(vector, offset, length);
       TimestampColumnVector vec = (TimestampColumnVector) vector;
+      Timestamp val;
       if (vector.isRepeating) {
         if (vector.noNulls || !vector.isNull[0]) {
-          long millis = vec.getEpochMilliseconds(0);
-          int adjustedNanos = vec.getSignedNanos(0);
-          if (adjustedNanos < 0) {
-            adjustedNanos += NANOS_PER_SECOND;
-          }
+          val = vec.asScratchTimestamp(0);
+          long millis = val.getTime();
           indexStatistics.updateTimestamp(millis);
           if (createBloomFilter) {
             bloomFilter.addLong(millis);
           }
-          final long secs = vec.getEpochSeconds(0) - base_timestamp;
-          final long nano = formatNanos(adjustedNanos);
+          final long secs = millis / MILLIS_PER_SECOND - base_timestamp;
+          final long nano = formatNanos(val.getNanos());
           for(int i=0; i < length; ++i) {
             seconds.write(secs);
             nanos.write(nano);
@@ -1756,14 +1754,11 @@ public class WriterImpl implements Writer, MemoryManager.Callback {
       } else {
         for(int i=0; i < length; ++i) {
           if (vec.noNulls || !vec.isNull[i + offset]) {
-            long secs = vec.getEpochSeconds(i + offset) - base_timestamp;
-            long millis = vec.getEpochMilliseconds(i + offset);
-            int adjustedNanos = vec.getSignedNanos(i + offset);
-            if (adjustedNanos < 0) {
-              adjustedNanos += NANOS_PER_SECOND;
-            }
+            val = vec.asScratchTimestamp(i + offset);
+            long millis = val.getTime();
+            long secs = millis / MILLIS_PER_SECOND - base_timestamp;
             seconds.write(secs);
-            nanos.write(formatNanos(adjustedNanos));
+            nanos.write(formatNanos(val.getNanos()));
             indexStatistics.updateTimestamp(millis);
             if (createBloomFilter) {
               bloomFilter.addLong(millis);

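The hunk above replaces the earlier epoch-field accessors (getEpochMilliseconds, getSignedNanos, plus the manual negative-nanos adjustment) with a single scratch java.sql.Timestamp per value: getTime() feeds the index statistics and bloom filter, and the two ORC streams receive seconds relative to the base epoch plus encoded nanos. A self-contained sketch of that arithmetic; BASE_SECONDS stands in for WriterImpl's base_timestamp field (ORC counts seconds from 2015-01-01) and formatNanos mirrors ORC's trailing-zero encoding, both assumptions about code outside this hunk:

import java.sql.Timestamp;

public class TimestampEncodeSketch {
  static final long MILLIS_PER_SECOND = 1000;
  // Assumption: stand-in for WriterImpl's base_timestamp (seconds at ORC's
  // 2015-01-01 base epoch, evaluated in the writer's local time zone).
  static final long BASE_SECONDS =
      Timestamp.valueOf("2015-01-01 00:00:00").getTime() / MILLIS_PER_SECOND;

  // Mirrors ORC's nano encoding: strip trailing decimal zeros and record
  // how many were stripped in the low three bits.
  static long formatNanos(int nanos) {
    if (nanos == 0) {
      return 0;
    } else if (nanos % 100 != 0) {
      return ((long) nanos) << 3;
    } else {
      nanos /= 100;
      int trailingZeros = 2;
      while (nanos % 10 == 0 && trailingZeros < 7) {
        nanos /= 10;
        trailingZeros += 1;
      }
      return ((long) nanos) << 3 | trailingZeros;
    }
  }

  public static void main(String[] args) {
    Timestamp val = Timestamp.valueOf("2016-04-10 08:59:43.5");
    long millis = val.getTime();                            // statistics / bloom filter
    long secs = millis / MILLIS_PER_SECOND - BASE_SECONDS;  // seconds stream
    long nano = formatNanos(val.getNanos());                // nanos stream
    System.out.println(secs + " / " + nano);
  }
}
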
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
index 845bc5f..c3d8d7e 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthColumn.txt
@@ -18,15 +18,18 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
- * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic 
+ * Generated from template DateColumnArithmeticIntervalYearMonthColumn.txt, which covers binary arithmetic
  * expressions between date and interval year month columns.
  */
 public class <ClassName> extends VectorExpression {
@@ -36,12 +39,18 @@ public class <ClassName> extends VectorExpression {
   private int colNum1;
   private int colNum2;
   private int outputColumn;
+  private Date scratchDate1;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private Date outputDate;
   private DateTimeMath dtm = new DateTimeMath();
 
   public <ClassName>(int colNum1, int colNum2, int outputColumn) {
     this.colNum1 = colNum1;
     this.colNum2 = colNum2;
     this.outputColumn = outputColumn;
+    scratchDate1 = new Date(0);
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+    outputDate = new Date(0);
   }
 
   public <ClassName>() {
@@ -54,10 +63,10 @@ public class <ClassName> extends VectorExpression {
       super.evaluateChildren(batch);
     }
 
-    // Input #1 is type date (epochDays).
+    // Input #1 is type date.
     LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
 
-    // Input #2 is type interval_year_month (months).
+    // Input #2 is type interval_year_month.
     LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
 
     // Output is type date.
@@ -89,38 +98,65 @@ public class <ClassName> extends VectorExpression {
      * conditional checks in the inner loop.
      */
     if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      outputVector[0] = dtm.addMonthsToDays(vector1[0], <OperatorSymbol> (int) vector2[0]);
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          scratchDate1, scratchIntervalYearMonth2,  outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
     } else if (inputColVector1.isRepeating) {
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(vector1[0], <OperatorSymbol> (int) vector2[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(vector1[0], <OperatorSymbol> (int) vector2[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else if (inputColVector2.isRepeating) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(vector1[i], <OperatorSymbol> (int) vector2[0]);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[0]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(vector1[i], <OperatorSymbol> (int) vector2[0]);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[0]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(vector1[i], <OperatorSymbol> (int) vector2[i]);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(vector1[i], <OperatorSymbol> (int) vector2[i]);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              scratchDate1, scratchIntervalYearMonth2,  outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     }

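Every hunk in this template follows the same per-element recipe: widen the long day count to milliseconds, wrap it in a reusable java.sql.Date, let DateTimeMath do calendar-correct year-month arithmetic into a scratch output Date, then narrow back to a day count. A standalone sketch of one element; the daysToMillis/dateToDays helpers below are simplified UTC stand-ins for DateWritable's versions, and addMonths stands in for DateTimeMath.add:

import java.sql.Date;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

public class DateIntervalSketch {
  // Assumption: simplified stand-ins for DateWritable.daysToMillis / dateToDays
  // (pure UTC math, no local-time adjustment).
  static long daysToMillis(int days) { return TimeUnit.DAYS.toMillis(days); }
  static int dateToDays(Date d) { return (int) TimeUnit.MILLISECONDS.toDays(d.getTime()); }

  // Assumption: stand-in for DateTimeMath.add(Date, HiveIntervalYearMonth, Date).
  static void addMonths(Date in, int months, Date out) {
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    cal.setTimeInMillis(in.getTime());
    cal.add(Calendar.MONTH, months);
    out.setTime(cal.getTimeInMillis());
  }

  public static void main(String[] args) {
    long[] vector1 = {16900};   // date column: days since epoch (a day in 2016)
    long[] vector2 = {14};      // interval column: total months (INTERVAL '1-2')
    long[] outputVector = new long[1];

    Date scratchDate1 = new Date(0);
    Date outputDate = new Date(0);
    scratchDate1.setTime(daysToMillis((int) vector1[0]));
    addMonths(scratchDate1, (int) vector2[0], outputDate);
    outputVector[0] = dateToDays(outputDate);
    System.out.println(vector1[0] + " days + 14 months -> " + outputVector[0] + " days");
  }
}

Reusing the two scratch objects, as the template's constructor does, keeps the inner loops allocation-free.
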
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
index 86a95c9..d1474fb 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticIntervalYearMonthScalar.txt
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
@@ -25,6 +27,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * Generated from template DateColumnArithmeticIntervalYearMonthScalar.txt, which covers binary arithmetic
@@ -35,14 +38,18 @@ public class <ClassName> extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private long value;
+  private HiveIntervalYearMonth value;
   private int outputColumn;
+  private Date scratchDate1;
+  private Date outputDate;
   private DateTimeMath dtm = new DateTimeMath();
 
   public <ClassName>(int colNum, long value, int outputColumn) {
     this.colNum = colNum;
-    this.value = value;
+    this.value = new HiveIntervalYearMonth((int) value);
     this.outputColumn = outputColumn;
+    scratchDate1 = new Date(0);
+    outputDate = new Date(0);
   }
 
   public <ClassName>() {
@@ -55,19 +62,19 @@ public class <ClassName> extends VectorExpression {
       super.evaluateChildren(batch);
     }
 
-    // Input #1 is type date (epochDays).
-    LongColumnVector inputColVector = (LongColumnVector) batch.cols[colNum];
+    // Input #1 is type date.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
 
     // Output is type date.
     LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] inputIsNull = inputColVector1.isNull;
     boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
     int n = batch.size;
-    long[] vector = inputColVector.vector;
+    long[] vector1 = inputColVector1.vector;
     long[] outputVector = outputColVector.vector;
 
     // return immediately if batch is empty
@@ -75,32 +82,46 @@ public class <ClassName> extends VectorExpression {
       return;
     }
 
-    if (inputColVector.isRepeating) {
-      outputVector[0] = dtm.addMonthsToDays(vector[0], <OperatorSymbol> (int) value);
-
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+    if (inputColVector1.isRepeating) {
+      scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchDate1, value, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
-    } else if (inputColVector.noNulls) {
+    } else if (inputColVector1.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(vector[i], <OperatorSymbol> (int) value);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(vector[i], <OperatorSymbol> (int) value);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else /* there are nulls */ {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(vector[i], <OperatorSymbol> (int) value);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(vector[i], <OperatorSymbol> (int) value);
+          scratchDate1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchDate1, value, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

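The scalar variant above differs mainly in what it hoists out of the loop: the compiler passes the interval as a long month count, which the constructor converts to a HiveIntervalYearMonth once, so each row only pays for the days-to-Date conversion. A small sketch of that constructor-time conversion (assumes hive-common on the classpath; the int constructor taking total months is the one the diff itself calls):

import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;

public class ScalarIntervalSketch {
  public static void main(String[] args) {
    long value = 14;  // total months, i.e. INTERVAL '1-2' YEAR TO MONTH
    HiveIntervalYearMonth interval = new HiveIntervalYearMonth((int) value);
    System.out.println(interval);  // expected to print 1-2
  }
}
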
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
index 6241ee2..63cebaf 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumn.txt
@@ -18,28 +18,155 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
- * Generated from template DateColumnArithmeticTimestampColumn.txt, which covers binary arithmetic
- * expressions between a date column and a timestamp column.
+ * Generated from template DateColumnArithmeticTimestampColumn.txt, a class
+ * which covers binary arithmetic expressions between a date column and a timestamp column.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int colNum1;
+  private int colNum2;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private DateTimeMath dtm = new DateTimeMath();
+
   public <ClassName>(int colNum1, int colNum2, int outputColumn) {
-    super(colNum1, colNum2, outputColumn);
+    this.colNum1 = colNum1;
+    this.colNum2 = colNum2;
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type Date (days).  For the math we convert it to a timestamp.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum2];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    outputColVector.isRepeating =
+         inputColVector1.isRepeating && inputColVector2.isRepeating
+      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
+      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
+
+    // Handle nulls first
+    NullUtil.propagateNullsColCol(
+      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
+
+    /* Disregard nulls for processing. In other words,
+     * the arithmetic operation is performed even if one or
+     * more inputs are null. This is to improve speed by avoiding
+     * conditional checks in the inner loop.
+     */
+    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+    } else if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else if (inputColVector2.isRepeating) {
+      <HiveOperandType2> value2 = inputColVector2.asScratch<CamelOperandType2>(0);
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, value2, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+              scratchTimestamp1, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    }
+
+    /* For the case when the output can have null values, follow
+     * the convention that the data values must be 1 for long and
+     * NaN for double. This is to prevent possible later zero-divide errors
+     * in complex arithmetic expressions like col2 / (col1 - 1)
+     * in the case when some col1 entries are null.
+     */
+    NullUtil.setNullDataEntries<CamelReturnType>(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
   }
 
   @Override
@@ -49,7 +176,7 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
             VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,

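With the base class folded in (its deletion follows below), every generated expression carries the same evaluate() skeleton seen above: set isRepeating on the output, propagate nulls once via NullUtil, then run one of the branch-free loops chosen by the isRepeating/selectedInUse flags, computing over null slots anyway and patching them afterwards. A minimal JDK-only sketch of that control flow, with op() as a placeholder for the generated dtm.<OperatorMethod> call:

public class VectorLoopSketch {
  // Placeholder for the generated arithmetic (dtm.<OperatorMethod> in the template).
  static long op(long v) { return v + 1; }

  static void evaluate(long[] in, boolean isRepeating,
                       int[] sel, boolean selectedInUse, int n, long[] out) {
    if (n == 0) {
      return;                 // empty batch: nothing to do
    }
    if (isRepeating) {
      out[0] = op(in[0]);     // only entry 0 of a repeating vector is meaningful
    } else if (selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];       // only the rows listed in sel[] are live
        out[i] = op(in[i]);
      }
    } else {
      for (int i = 0; i != n; i++) {
        out[i] = op(in[i]);
      }
    }
  }

  public static void main(String[] args) {
    long[] in = {10, 20, 30, 40};
    long[] out = new long[4];
    evaluate(in, false, new int[]{0, 2}, true, 2, out);
    System.out.println(out[0] + "," + out[2]);  // 11,31
  }
}
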
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumnBase.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumnBase.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumnBase.txt
deleted file mode 100644
index a61b769..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampColumnBase.txt
+++ /dev/null
@@ -1,171 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
- 
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-/**
- * Generated from template DateColumnArithmeticTimestampColumnBase.txt, a base class
- * which covers binary arithmetic expressions between a date column and timestamp column.
- */
-public abstract class <BaseClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum1;
-  private int colNum2;
-  private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp;
-
-  public <BaseClassName>(int colNum1, int colNum2, int outputColumn) {
-    this.colNum1 = colNum1;
-    this.colNum2 = colNum2;
-    this.outputColumn = outputColumn;
-    scratchPisaTimestamp = new PisaTimestamp();
-  }
-
-  public <BaseClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    // Input #1 is type Date (epochDays).
-    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
-
-    // Input #2 is type timestamp/interval_day_time.
-    TimestampColumnVector inputColVector2 = (TimestampColumnVector) batch.cols[colNum2];
-
-    // Output is type timestamp.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
-
-    int[] sel = batch.selected;
-    int n = batch.size;
-    long[] vector1 = inputColVector1.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    outputColVector.isRepeating =
-         inputColVector1.isRepeating && inputColVector2.isRepeating
-      || inputColVector1.isRepeating && !inputColVector1.noNulls && inputColVector1.isNull[0]
-      || inputColVector2.isRepeating && !inputColVector2.noNulls && inputColVector2.isNull[0];
-
-    // Handle nulls first  
-    NullUtil.propagateNullsColCol(
-      inputColVector1, inputColVector2, outputColVector, sel, n, batch.selectedInUse);
-
-    /* Disregard nulls for processing. In other words,
-     * the arithmetic operation is performed even if one or
-     * more inputs are null. This is to improve speed by avoiding
-     * conditional checks in the inner loop.
-     */
-    if (inputColVector1.isRepeating && inputColVector2.isRepeating) {
-      outputColVector.<OperatorMethod>(
-          scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-          inputColVector2.asScratchPisaTimestamp(0),
-          0);
-    } else if (inputColVector1.isRepeating) {
-        PisaTimestamp value1 =
-            scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0]));
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputColVector.<OperatorMethod>(
-              value1,
-              inputColVector2.asScratchPisaTimestamp(i),
-              i);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputColVector.<OperatorMethod>(
-              value1,
-              inputColVector2.asScratchPisaTimestamp(i),
-              i);
-        }
-      }
-    } else if (inputColVector2.isRepeating) {
-      PisaTimestamp value2 = inputColVector2.asScratchPisaTimestamp(0);
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputColVector.<OperatorMethod>(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              value2,
-              i);
-         }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputColVector.<OperatorMethod>(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              value2,
-              i);
-        }
-      }
-    } else {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputColVector.<OperatorMethod>(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              inputColVector2.asScratchPisaTimestamp(i),
-              i);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputColVector.<OperatorMethod>(
-              scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-              inputColVector2.asScratchPisaTimestamp(i),
-              i);
-        }
-      }
-    }
-
-    /* For the case when the output can have null values, follow
-     * the convention that the data values must be 1 for long and
-     * NaN for double. This is to prevent possible later zero-divide errors
-     * in complex arithmetic expressions like col2 / (col1 - 1)
-     * in the case when some col1 entries are null.
-     */
-    NullUtil.setNullDataEntriesTimestamp(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "timestamp";
-  }
-}
-

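The base class deleted above and its replacement both rely on the two-pass null strategy described in the closing comment: run the arithmetic over every row, nulls included, to keep the inner loop branch-free, then patch the null slots with a sentinel (1 for longs, NaN for doubles) so downstream compound arithmetic stays well-defined on rows that are null anyway. A small plain-Java sketch of that pattern, standing in for the NullUtil.setNullDataEntries* helpers:

    import java.util.Arrays;

    public class NullConventionSketch {
      public static void main(String[] args) {
        long[] vector = { 5, 7, 9 };
        boolean[] isNull = { false, true, false };

        // Pass 1: blind arithmetic, no per-row null checks.
        for (int i = 0; i < vector.length; i++) {
          vector[i] = vector[i] * 2;
        }

        // Pass 2: overwrite null slots with the convention value (1 for long),
        // keeping later arithmetic over null rows well-defined.
        for (int i = 0; i < vector.length; i++) {
          if (isNull[i]) {
            vector[i] = 1;
          }
        }
        System.out.println(Arrays.toString(vector));  // [10, 1, 18]
      }
    }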
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
index b813d11..7aee529 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalar.txt
@@ -19,32 +19,123 @@
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
 import java.sql.Timestamp;
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
-import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
-import org.apache.hive.common.util.DateUtils;
 
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
- * Generated from template DateColumnArithmeticTimestampScalar.txt, which covers binary arithmetic
- * expressions between a date column and a timestamp scalar.
+ * Generated from template DateColumnArithmeticTimestampScalar.txt, which covers binary
+ * arithmetic expressions between a date column and a timestamp scalar.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
-  public <ClassName>(int colNum, <ScalarHiveTimestampType2> value, int outputColumn) {
-    super(colNum, <PisaTimestampConversion2>, outputColumn);
+  private int colNum;
+  private <HiveOperandType2> value;
+  private int outputColumn;
+  private Timestamp scratchTimestamp1;
+  private DateTimeMath dtm = new DateTimeMath();
+
+  public <ClassName>(int colNum, <HiveOperandType2> value, int outputColumn) {
+    this.colNum = colNum;
+    this.value = value;
+    this.outputColumn = outputColumn;
+    scratchTimestamp1 = new Timestamp(0);
   }
 
   public <ClassName>() {
-    super();
+  }
+
+  @Override
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #1 is type date (days).  For the math we convert it to a timestamp.
+    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector1.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector1.noNulls;
+    outputColVector.isRepeating = inputColVector1.isRepeating;
+    int n = batch.size;
+    long[] vector1 = inputColVector1.vector;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector1.isRepeating) {
+      scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[0]));
+      dtm.<OperatorMethod>(
+          scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector1.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else /* there are nulls */ {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          scratchTimestamp1.setTime(DateWritable.daysToMillis((int) vector1[i]));
+          dtm.<OperatorMethod>(
+             scratchTimestamp1, value, outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
   }
 
   @Override
@@ -54,7 +145,7 @@ public class <ClassName> extends <BaseClassName> {
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
             VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.COLUMN,

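
As with the other generated expressions, the scalar variant above dispatches on two batch-level flags before touching any rows: inputColVector1.isRepeating (the whole column holds one value, so only entry 0 is computed) and batch.selectedInUse (a prior filter left a sel[] indirection array of surviving row indices). A hedged sketch of that control flow follows; apply() is a hypothetical placeholder for the generated dtm.<OperatorMethod> call.

    import java.util.Arrays;

    public class BatchDispatchSketch {

      static long apply(long v) { return v + 1; }  // hypothetical per-row op

      static void evaluate(long[] vector, long[] output, boolean isRepeating,
                           boolean selectedInUse, int[] sel, int n) {
        if (isRepeating) {
          // Entire column holds one value: compute entry 0 only.
          output[0] = apply(vector[0]);
        } else if (selectedInUse) {
          // A prior filter chose a subset of rows; sel maps j -> row index i.
          for (int j = 0; j != n; j++) {
            int i = sel[j];
            output[i] = apply(vector[i]);
          }
        } else {
          for (int i = 0; i != n; i++) {
            output[i] = apply(vector[i]);
          }
        }
      }

      public static void main(String[] args) {
        long[] in = { 10, 20, 30, 40 };
        long[] out = new long[4];
        evaluate(in, out, false, true, new int[] { 1, 3 }, 2);
        System.out.println(Arrays.toString(out));  // [0, 21, 0, 41]
      }
    }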
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalarBase.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalarBase.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalarBase.txt
deleted file mode 100644
index d64fba0..0000000
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateColumnArithmeticTimestampScalarBase.txt
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
-
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
-import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
-import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.ql.exec.vector.*;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
-
-/**
- * Generated from template DateColumnArithmeticTimestampScalarBase.txt, a base class
- * which covers binary arithmetic expressions between a date column and a timestamp scalar.
- */
-public abstract class <BaseClassName> extends VectorExpression {
-
-  private static final long serialVersionUID = 1L;
-
-  private int colNum;
-  private PisaTimestamp value;
-  private int outputColumn;
-  private PisaTimestamp scratchPisaTimestamp;
-
-  public <BaseClassName>(int colNum, PisaTimestamp value, int outputColumn) {
-    this.colNum = colNum;
-    this.value = value;
-    this.outputColumn = outputColumn;
-    scratchPisaTimestamp = new PisaTimestamp();
-  }
-
-  public <BaseClassName>() {
-  }
-
-  @Override
-  public void evaluate(VectorizedRowBatch batch) {
-
-    if (childExpressions != null) {
-      super.evaluateChildren(batch);
-    }
-
-    // Input #1 is type date (epochDays).
-    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum];
-
-    // Output is type timestamp.
-    TimestampColumnVector outputColVector = (TimestampColumnVector) batch.cols[outputColumn];
-
-    int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector1.isNull;
-    boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector1.noNulls;
-    outputColVector.isRepeating = inputColVector1.isRepeating;
-    int n = batch.size;
-    long[] vector1 = inputColVector1.vector;
-
-    // return immediately if batch is empty
-    if (n == 0) {
-      return;
-    }
-
-    if (inputColVector1.isRepeating) {
-        outputColVector.<OperatorMethod>(
-          scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[0])),
-          value,
-          0);
-
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
-      outputIsNull[0] = inputIsNull[0];
-    } else if (inputColVector1.noNulls) {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputColVector.<OperatorMethod>(
-            scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-            value,
-            i);
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputColVector.<OperatorMethod>(
-            scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-            value,
-            i);
-        }
-      }
-    } else /* there are nulls */ {
-      if (batch.selectedInUse) {
-        for(int j = 0; j != n; j++) {
-          int i = sel[j];
-          outputColVector.<OperatorMethod>(
-            scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-            value,
-            i);
-          outputIsNull[i] = inputIsNull[i];
-        }
-      } else {
-        for(int i = 0; i != n; i++) {
-          outputColVector.<OperatorMethod>(
-            scratchPisaTimestamp.updateFromTimestampMilliseconds(DateWritable.daysToMillis((int) vector1[i])),
-            value,
-            i);
-        }
-        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
-      }
-    }
-
-    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
-  }
-
-  @Override
-  public int getOutputColumn() {
-    return outputColumn;
-  }
-
-  @Override
-  public String getOutputType() {
-    return "timestamp";
-  }
-}

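One more convention shared by the deleted base class and its inlined replacement: in the branch where nulls exist, the input null flags are forwarded to the output, element by element when a selection vector is active, or with a single bulk System.arraycopy over the contiguous batch. Sketched below with plain arrays:

    import java.util.Arrays;

    public class NullFlagCopySketch {
      public static void main(String[] args) {
        boolean[] inputIsNull  = { true, true, false, true };
        boolean[] outputIsNull = new boolean[4];
        int[] sel = { 0, 2 };
        boolean selectedInUse = true;
        int n = selectedInUse ? sel.length : inputIsNull.length;

        if (selectedInUse) {
          // Only the selected rows matter; copy their flags individually.
          for (int j = 0; j != n; j++) {
            int i = sel[j];
            outputIsNull[i] = inputIsNull[i];
          }
        } else {
          // Contiguous batch: one bulk copy is cheaper.
          System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
        }
        System.out.println(Arrays.toString(outputIsNull));  // [true, false, false, false]
      }
    }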
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
index 653565e..c68ac34 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticIntervalYearMonthColumn.txt
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
+import java.sql.Date;
+import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
@@ -33,6 +35,7 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
  * Generated from template DateTimeScalarArithmeticIntervalYearMonthColumn.txt.
@@ -44,14 +47,18 @@ public class <ClassName> extends VectorExpression {
   private static final long serialVersionUID = 1L;
 
   private int colNum;
-  private long value;
+  private Date value;
   private int outputColumn;
+  private HiveIntervalYearMonth scratchIntervalYearMonth2;
+  private Date outputDate;
   private DateTimeMath dtm = new DateTimeMath();
 
   public <ClassName>(long value, int colNum, int outputColumn) {
     this.colNum = colNum;
-    this.value = value;
+    this.value = new Date(DateWritable.daysToMillis((int) value));
     this.outputColumn = outputColumn;
+    scratchIntervalYearMonth2 = new HiveIntervalYearMonth();
+    outputDate = new Date(0);
   }
 
   public <ClassName>() {
@@ -70,18 +77,18 @@ public class <ClassName> extends VectorExpression {
     }
 
     // Input #2 is type Interval_Year_Month (months).
-    LongColumnVector inputColVector = (LongColumnVector) batch.cols[colNum];
+    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum];
 
     // Output is type Date.
     LongColumnVector outputColVector = (LongColumnVector) batch.cols[outputColumn];
 
     int[] sel = batch.selected;
-    boolean[] inputIsNull = inputColVector.isNull;
+    boolean[] inputIsNull = inputColVector2.isNull;
     boolean[] outputIsNull = outputColVector.isNull;
-    outputColVector.noNulls = inputColVector.noNulls;
-    outputColVector.isRepeating = inputColVector.isRepeating;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
     int n = batch.size;
-    long[] vector = inputColVector.vector;
+    long[] vector2 = inputColVector2.vector;
     long[] outputVector = outputColVector.vector;
 
     // return immediately if batch is empty
@@ -89,32 +96,46 @@ public class <ClassName> extends VectorExpression {
       return;
     }
 
-    if (inputColVector.isRepeating) {
-      outputVector[0] = dtm.addMonthsToDays(value, <OperatorSymbol> (int) vector[0]);
-
-      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+    if (inputColVector2.isRepeating) {
+      scratchIntervalYearMonth2.set((int) vector2[0]);
+      dtm.<OperatorMethod>(
+          value, scratchIntervalYearMonth2, outputDate);
+      outputVector[0] = DateWritable.dateToDays(outputDate);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
       outputIsNull[0] = inputIsNull[0];
-    } else if (inputColVector.noNulls) {
+    } else if (inputColVector2.noNulls) {
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(value, <OperatorSymbol> (int) vector[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(value, <OperatorSymbol> (int) vector[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
       }
     } else {                         /* there are nulls */
       if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = dtm.addMonthsToDays(value, <OperatorSymbol> (int) vector[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
           outputIsNull[i] = inputIsNull[i];
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = dtm.addMonthsToDays(value, <OperatorSymbol> (int) vector[i]);
+          scratchIntervalYearMonth2.set((int) vector2[i]);
+          dtm.<OperatorMethod>(
+              value, scratchIntervalYearMonth2, outputDate);
+          outputVector[i] = DateWritable.dateToDays(outputDate);
         }
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
       }

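
The interval_year_month hunk above now routes through DateTimeMath with a real java.sql.Date and a scratch HiveIntervalYearMonth, then converts the result back to epoch days with DateWritable.dateToDays. A hedged sketch of the same days-in, months-added, days-out computation using java.time instead of the Hive helpers, assuming DateTimeMath follows the usual Calendar-style end-of-month clamping:

    import java.time.LocalDate;

    public class DatePlusYearMonthSketch {

      // Stand-in for dtm.add(value, scratchIntervalYearMonth2, outputDate)
      // followed by DateWritable.dateToDays(outputDate).
      static long addMonthsToEpochDays(long epochDays, int totalMonths) {
        return LocalDate.ofEpochDay(epochDays).plusMonths(totalMonths).toEpochDay();
      }

      public static void main(String[] args) {
        long days = LocalDate.of(2000, 1, 31).toEpochDay();
        // 2000-01-31 + 1 month clamps to the shorter month's end: 2000-02-29.
        System.out.println(LocalDate.ofEpochDay(addMonthsToEpochDays(days, 1)));
      }
    }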
http://git-wip-us.apache.org/repos/asf/hive/blob/ca11c393/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
index e93bed5..cb6b750 100644
--- a/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
+++ b/ql/src/gen/vectorization/ExpressionTemplates/DateScalarArithmeticTimestampColumn.txt
@@ -18,45 +18,141 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions.gen;
 
+import java.sql.Timestamp;
+
+import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.*;
 
-import org.apache.hadoop.hive.common.type.PisaTimestamp;
 /*
  * Because of the templatized nature of the code, either or both
  * of these ColumnVector imports may be needed. Listing both of them
  * rather than using ....vectorization.*;
  */
-import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.NullUtil;
 import org.apache.hadoop.hive.ql.util.DateTimeMath;
+import org.apache.hadoop.hive.serde2.io.DateWritable;
 
 /**
- * Generated from template DateScalarArithmeticTimestampColumn.txt.
+ * Generated from template DateScalarArithmeticTimestampColumn.txt.
  * Implements a vectorized arithmetic operator with a scalar on the left and a
  * column vector on the right. The result is output to an output column vector.
  */
-public class <ClassName> extends <BaseClassName> {
+public class <ClassName> extends VectorExpression {
 
   private static final long serialVersionUID = 1L;
 
+  private int colNum;
+  private Timestamp value;
+  private int outputColumn;
+  private DateTimeMath dtm = new DateTimeMath();
+
   public <ClassName>(long value, int colNum, int outputColumn) {
-    super(value, colNum, outputColumn);
+    this.colNum = colNum;
+    // Scalar input #1 is type date (days).  For the math we convert it to a timestamp.
+    this.value = new Timestamp(0);
+    this.value.setTime(DateWritable.daysToMillis((int) value));
+    this.outputColumn = outputColumn;
   }
 
   public <ClassName>() {
   }
 
   @Override
+  /**
+   * Method to evaluate scalar-column operation in vectorized fashion.
+   *
+   * @param batch a package of rows with each column stored in a vector
+   */
+  public void evaluate(VectorizedRowBatch batch) {
+
+    if (childExpressions != null) {
+      super.evaluateChildren(batch);
+    }
+
+    // Input #2 is type <OperandType2>.
+    <InputColumnVectorType2> inputColVector2 = (<InputColumnVectorType2>) batch.cols[colNum];
+
+    // Output is type <ReturnType>.
+    <OutputColumnVectorType> outputColVector = (<OutputColumnVectorType>) batch.cols[outputColumn];
+
+    int[] sel = batch.selected;
+    boolean[] inputIsNull = inputColVector2.isNull;
+    boolean[] outputIsNull = outputColVector.isNull;
+    outputColVector.noNulls = inputColVector2.noNulls;
+    outputColVector.isRepeating = inputColVector2.isRepeating;
+    int n = batch.size;
+
+    // return immediately if batch is empty
+    if (n == 0) {
+      return;
+    }
+
+    if (inputColVector2.isRepeating) {
+      dtm.<OperatorMethod>(
+          value, inputColVector2.asScratch<CamelOperandType2>(0), outputColVector.getScratch<CamelReturnType>());
+      outputColVector.setFromScratch<CamelReturnType>(0);
+      // Even if there are no nulls, we always copy over entry 0. Simplifies code.
+      outputIsNull[0] = inputIsNull[0];
+    } else if (inputColVector2.noNulls) {
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+      }
+    } else {                         /* there are nulls */
+      if (batch.selectedInUse) {
+        for(int j = 0; j != n; j++) {
+          int i = sel[j];
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+          outputIsNull[i] = inputIsNull[i];
+        }
+      } else {
+        for(int i = 0; i != n; i++) {
+          dtm.<OperatorMethod>(
+              value, inputColVector2.asScratch<CamelOperandType2>(i), outputColVector.getScratch<CamelReturnType>());
+          outputColVector.setFromScratch<CamelReturnType>(i);
+        }
+        System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
+      }
+    }
+
+    NullUtil.setNullOutputEntriesColScalar(outputColVector, batch.selectedInUse, sel, n);
+  }
+
+  @Override
+  public int getOutputColumn() {
+    return outputColumn;
+  }
+
+  @Override
+  public String getOutputType() {
+    return "<ReturnType>";
+  }
+
+  @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {
     return (new VectorExpressionDescriptor.Builder())
         .setMode(
             VectorExpressionDescriptor.Mode.PROJECTION)
         .setNumArguments(2)
         .setArgumentTypes(
-            VectorExpressionDescriptor.ArgumentType.getType("<OperandType1>"),
+            VectorExpressionDescriptor.ArgumentType.getType("date"),
             VectorExpressionDescriptor.ArgumentType.getType("<OperandType2>"))
         .setInputExpressionTypes(
             VectorExpressionDescriptor.InputExpressionType.SCALAR,