Posted to commits@hive.apache.org by sz...@apache.org on 2019/09/07 07:11:32 UTC

[hive] branch master updated: HIVE-22099: Several date related UDFs can't handle Julian dates properly since HIVE-20007 (Adam Szita, reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

szita pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 287e5d5  HIVE-22099: Several date related UDFs can't handle Julian dates properly since HIVE-20007 (Adam Szita, reviewed by Jesus Camacho Rodriguez)
287e5d5 is described below

commit 287e5d5e4c43beb2bc84a80e342f897494e32c6c
Author: Adam Szita <sz...@cloudera.com>
AuthorDate: Sat Sep 7 09:01:43 2019 +0200

    HIVE-22099: Several date related UDFs can't handle Julian dates properly since HIVE-20007 (Adam Szita, reviewed by Jesus Camacho Rodriguez)
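    java.util.GregorianCalendar is by default a hybrid calendar that applies Julian rules
    before its cutover date of 1582-10-15, so UDFs that interpreted epoch millis through
    Calendar.getInstance(TimeZone.getTimeZone("UTC")) read pre-1582 instants back as Julian
    dates, several days off from the proleptic Gregorian dates the rest of Hive's date
    handling expects. The patch switches them to DateTimeMath.getProlepticGregorianCalendarUTC(),
    which pushes the Gregorian change back to Long.MIN_VALUE. A minimal standalone sketch of
    the mismatch (class name and printed values are illustrative, not taken from the code base):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.GregorianCalendar;
    import java.util.TimeZone;

    public class ProlepticCalendarSketch {
      public static void main(String[] args) throws Exception {
        TimeZone utc = TimeZone.getTimeZone("UTC");

        // Proleptic Gregorian: apply Gregorian rules to every instant, as
        // DateTimeMath.getProlepticGregorianCalendarUTC() now does.
        GregorianCalendar proleptic = new GregorianCalendar(utc);
        proleptic.setGregorianChange(new Date(Long.MIN_VALUE));

        SimpleDateFormat prolepticFmt = new SimpleDateFormat("yyyy-MM-dd");
        prolepticFmt.setCalendar(proleptic);

        // Default calendar: Julian rules before 1582-10-15 (the old UDF behaviour).
        SimpleDateFormat hybridFmt = new SimpleDateFormat("yyyy-MM-dd");
        hybridFmt.setTimeZone(utc);

        // Parse a pre-1582 literal as proleptic Gregorian, then format the same instant both ways.
        Date d = prolepticFmt.parse("1001-01-05");
        System.out.println(prolepticFmt.format(d)); // 1001-01-05
        System.out.println(hybridFmt.format(d));    // shifted back several days (Julian rendering)
      }
    }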
---
 .../exec/vector/expressions/CastDateToString.java  |  6 +--
 .../expressions/VectorUDFTimestampFieldDate.java   |  6 +--
 .../expressions/VectorUDFTimestampFieldString.java |  5 +--
 .../VectorUDFTimestampFieldTimestamp.java          |  5 +--
 .../apache/hadoop/hive/ql/udf/UDFDayOfMonth.java   |  4 +-
 .../org/apache/hadoop/hive/ql/udf/UDFMonth.java    |  4 +-
 .../apache/hadoop/hive/ql/udf/UDFWeekOfYear.java   |  6 +--
 .../org/apache/hadoop/hive/ql/udf/UDFYear.java     |  4 +-
 .../hive/ql/udf/generic/GenericUDFAddMonths.java   |  7 ++--
 .../hive/ql/udf/generic/GenericUDFDateFormat.java  |  4 +-
 .../ql/udf/generic/GenericUDFMonthsBetween.java    |  6 +--
 .../apache/hadoop/hive/ql/util/DateTimeMath.java   | 13 +++++++
 .../vector/expressions/TestVectorTypeCasts.java    | 11 +++---
 .../ql/udf/generic/TestGenericUDFAddMonths.java    |  2 +
 .../ql/udf/generic/TestGenericUDFDateFormat.java   | 15 +++++++-
 .../udf/generic/TestGenericUDFMonthsBetween.java   |  3 ++
 ql/src/test/queries/clientpositive/udf_day.q       |  3 ++
 ql/src/test/queries/clientpositive/udf_month.q     |  7 +++-
 .../test/queries/clientpositive/udf_weekofyear.q   |  4 ++
 ql/src/test/queries/clientpositive/udf_year.q      |  5 +++
 .../llap/vectorized_timestamp_funcs.q.out          |  6 +--
 .../spark/vectorized_timestamp_funcs.q.out         |  6 +--
 ql/src/test/results/clientpositive/udf_day.q.out   | 18 +++++++++
 ql/src/test/results/clientpositive/udf_month.q.out | 45 +++++++++++++++-------
 .../results/clientpositive/udf_weekofyear.q.out    | 27 +++++++++++++
 ql/src/test/results/clientpositive/udf_year.q.out  | 37 ++++++++++++++++++
 .../vectorized_timestamp_funcs.q.out               |  6 +--
 27 files changed, 204 insertions(+), 61 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
index 8a07232..461711f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDateToString.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import org.apache.hadoop.hive.common.format.datetime.HiveSqlDateTimeFormatter;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 
 import java.sql.Date;
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 
 public class CastDateToString extends LongToStringUnaryUDF {
   private static final long serialVersionUID = 1L;
@@ -34,13 +34,13 @@ public class CastDateToString extends LongToStringUnaryUDF {
   public CastDateToString() {
     super();
     formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+    formatter.setCalendar(DateTimeMath.getProlepticGregorianCalendarUTC());
   }
 
   public CastDateToString(int inputColumn, int outputColumnNum) {
     super(inputColumn, outputColumnNum);
     formatter = new SimpleDateFormat("yyyy-MM-dd");
-    formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+    formatter.setCalendar(DateTimeMath.getProlepticGregorianCalendarUTC());
   }
 
   // The assign method will be overridden for CHAR and VARCHAR.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
index 837de9d..ac6519b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldDate.java
@@ -20,13 +20,13 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
@@ -34,7 +34,6 @@ import org.apache.hive.common.util.DateUtils;
 
 import com.google.common.base.Preconditions;
 
-
 /**
  * Abstract class to return various fields from a Timestamp or Date.
  */
@@ -44,8 +43,7 @@ public abstract class VectorUDFTimestampFieldDate extends VectorExpression {
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected final transient Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
   public VectorUDFTimestampFieldDate(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
index 9acfa86..9ef9094 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldString.java
@@ -23,10 +23,10 @@ import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 
 import java.text.ParseException;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 /**
  * Abstract class to return various fields from a String.
@@ -40,8 +40,7 @@ public abstract class VectorUDFTimestampFieldString extends VectorExpression {
   protected final int fieldLength;
   private static final String patternMin = "0000-00-00 00:00:00.000000000";
   private static final String patternMax = "9999-19-99 29:59:59.999999999";
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected final transient Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
   public VectorUDFTimestampFieldString(int colNum, int outputColumnNum, int fieldStart, int fieldLength) {
     super(outputColumnNum);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
index 94e8b47..142d05f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorUDFTimestampFieldTimestamp.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
 import java.util.Arrays;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
@@ -28,6 +27,7 @@ import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hive.common.util.DateUtils;
@@ -44,8 +44,7 @@ public abstract class VectorUDFTimestampFieldTimestamp extends VectorExpression
   protected final int colNum;
   protected final int field;
 
-  protected transient final Calendar calendar = Calendar.getInstance(
-      TimeZone.getTimeZone("UTC"));
+  protected final transient Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
   public VectorUDFTimestampFieldTimestamp(int field, int colNum, int outputColumnNum) {
     super(outputColumnNum);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
index 72fa263..cbb8fd0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFDayOfMonth.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDayOfMonthTime
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,7 +38,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import org.apache.hadoop.io.IntWritable;
 
 import java.util.Calendar;
-import java.util.TimeZone;
 
 /**
  * UDFDayOfMonth.
@@ -60,7 +60,7 @@ public class UDFDayOfMonth extends GenericUDF {
   private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
   private final IntWritable output = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
   @Override
   public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
index 1a8b2da..e55dbc4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFMonth.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFMonthTimestamp
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,7 +38,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import org.apache.hadoop.io.IntWritable;
 
 import java.util.Calendar;
-import java.util.TimeZone;
 
 /**
  * UDFMonth.
@@ -60,7 +60,7 @@ public class UDFMonth extends GenericUDF {
   private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
   private final IntWritable output = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
 
   @Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
index 18ca9a7..6d64da1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFWeekOfYear.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.ql.udf;
 
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Timestamp;
@@ -30,6 +29,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearDate
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearString;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFWeekOfYearTimestamp;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.io.IntWritable;
@@ -39,7 +39,7 @@ import org.apache.hadoop.io.Text;
  * UDFWeekOfYear.
  *
  */
-@Description(name = "yearweek",
+@Description(name = "weekofyear",
     value = "_FUNC_(date) - Returns the week of the year of the given date. A week "
     + "is considered to start on a Monday and week 1 is the first week with >3 days.",
     extended = "Examples:\n"
@@ -52,7 +52,7 @@ public class UDFWeekOfYear extends UDF {
 
   private final IntWritable result = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
 
   public UDFWeekOfYear() {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
index fcbb57f..8ac1dc5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFYear.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFYearTimestamp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.NDV;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -37,7 +38,6 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectIn
 import org.apache.hadoop.io.IntWritable;
 
 import java.util.Calendar;
-import java.util.TimeZone;
 
 /**
  * UDFYear.
@@ -60,7 +60,7 @@ public class UDFYear extends GenericUDF {
   private transient PrimitiveObjectInspector.PrimitiveCategory[] inputTypes = new PrimitiveObjectInspector.PrimitiveCategory[1];
   private final IntWritable output = new IntWritable();
 
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
 
 
   @Override
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
index 6df0913..7ca8dad 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFAddMonths.java
@@ -24,7 +24,6 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Timestamp;
@@ -32,6 +31,7 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -64,7 +64,7 @@ public class GenericUDFAddMonths extends GenericUDF {
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[3];
   private final Text output = new Text();
   private transient SimpleDateFormat formatter = null;
-  private final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar calendar = DateTimeMath.getProlepticGregorianCalendarUTC();
   private transient Integer numMonthsConst;
   private transient boolean isNumMonthsConst;
 
@@ -82,7 +82,7 @@ public class GenericUDFAddMonths extends GenericUDF {
         String fmtStr = getConstantStringValue(arguments, 2);
         if (fmtStr != null) {
           formatter = new SimpleDateFormat(fmtStr);
-          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+          formatter.setCalendar(calendar);
         }
       } else {
         throw new UDFArgumentTypeException(2, getFuncName() + " only takes constant as "
@@ -92,6 +92,7 @@ public class GenericUDFAddMonths extends GenericUDF {
     if (formatter == null) {
       //If the DateFormat is not provided by the user or is invalid, use the default format YYYY-MM-dd
       formatter = DateUtils.getDateFormat();
+      formatter.setCalendar(calendar);
     }
 
     // the function should support both short date and full timestamp format
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
index 6d3e86f..a30b916 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateFormat.java
@@ -21,7 +21,6 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping.STRING_GROUP;
 
 import java.text.SimpleDateFormat;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Timestamp;
@@ -29,6 +28,7 @@ import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
@@ -80,7 +80,7 @@ public class GenericUDFDateFormat extends GenericUDF {
       if (fmtStr != null) {
         try {
           formatter = new SimpleDateFormat(fmtStr);
-          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+          formatter.setCalendar(DateTimeMath.getProlepticGregorianCalendarUTC());
         } catch (IllegalArgumentException e) {
           // ignore
         }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
index e0db417..ad128d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFMonthsBetween.java
@@ -29,13 +29,13 @@ import static org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveO
 
 import java.math.BigDecimal;
 import java.util.Calendar;
-import java.util.TimeZone;
 
 import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -66,8 +66,8 @@ public class GenericUDFMonthsBetween extends GenericUDF {
   private transient PrimitiveCategory[] tsInputTypes = new PrimitiveCategory[2];
   private transient Converter[] dtConverters = new Converter[2];
   private transient PrimitiveCategory[] dtInputTypes = new PrimitiveCategory[2];
-  private final Calendar cal1 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
-  private final Calendar cal2 = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
+  private final Calendar cal1 = DateTimeMath.getProlepticGregorianCalendarUTC();
+  private final Calendar cal2 = DateTimeMath.getProlepticGregorianCalendarUTC();
   private final DoubleWritable output = new DoubleWritable();
   private boolean isRoundOffNeeded = true;
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
index 20acfa2..53395b6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DateTimeMath.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hive.common.util.DateUtils;
 
 import java.util.Calendar;
+import java.util.GregorianCalendar;
 import java.util.TimeZone;
 import java.util.concurrent.TimeUnit;
 
@@ -600,4 +601,16 @@ public class DateTimeMath {
     result.set(totalSeconds, nanosResult.nanos);
     return true;
   }
+
+  /**
+   * TODO - this is a temporary fix for handling Julian calendar dates.
+   * Returns a Gregorian calendar that can be used from year 0+ instead of default 1582.10.15.
+   * This is desirable for some UDFs that work on dates which normally would use Julian calendar.
+   * @return the calendar
+   */
+  public static Calendar getProlepticGregorianCalendarUTC() {
+    GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC".intern()));
+    calendar.setGregorianChange(new java.util.Date(Long.MIN_VALUE));
+    return calendar;
+  }
 }
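    The field-extraction UDFs changed in this patch (UDFMonth, UDFYear, UDFDayOfMonth,
    UDFWeekOfYear and the vectorized variants) consume the same calendar through
    Calendar.get(). A small illustrative sketch of the difference, assuming the epoch value
    is derived the proleptic-Gregorian way java.time uses:

    import java.time.LocalDate;
    import java.util.Calendar;
    import java.util.GregorianCalendar;
    import java.util.TimeZone;

    public class MonthFieldSketch {
      public static void main(String[] args) {
        // Epoch millis for proleptic-Gregorian 1001-01-05, roughly the value a
        // days-since-epoch date representation would hand to these UDFs.
        long millis = LocalDate.of(1001, 1, 5).toEpochDay() * 86_400_000L;

        Calendar hybrid = Calendar.getInstance(TimeZone.getTimeZone("UTC")); // old code path
        GregorianCalendar proleptic = new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        proleptic.setGregorianChange(new java.util.Date(Long.MIN_VALUE));    // new code path

        hybrid.setTimeInMillis(millis);
        proleptic.setTimeInMillis(millis);

        // The proleptic calendar agrees with the literal the user wrote (month 1);
        // the hybrid calendar reads the instant back as a Julian date and can report
        // a different month (here it falls back into December of the prior year).
        System.out.println("proleptic month: " + (proleptic.get(Calendar.MONTH) + 1));
        System.out.println("hybrid month:    " + (hybrid.get(Calendar.MONTH) + 1));
      }
    }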
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
index a449ea1..ea4aeae 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorTypeCasts.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.assertTrue;
 import java.math.BigDecimal;
 import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
@@ -33,6 +34,7 @@ import org.junit.Assert;
 
 import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.ql.util.DateTimeMath;
 import org.apache.hadoop.hive.serde2.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
@@ -74,9 +76,6 @@ public class TestVectorTypeCasts {
     Assert.assertEquals(1, resultV.vector[6]);
   }
 
-  // +8 hours from PST to GMT, needed because java.sql.Date will subtract 8 hours from final
-  // value because VM in test time zone is PST.
-  private static final long TIME_DIFFERENCE = 28800000L;
   @Test
   public void testCastDateToString() throws HiveException {
     int[] intValues = new int[100];
@@ -86,10 +85,12 @@ public class TestVectorTypeCasts {
     VectorExpression expr = new CastDateToString(0, 1);
     expr.evaluate(b);
 
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
+    formatter.setCalendar(DateTimeMath.getProlepticGregorianCalendarUTC());
+
     String expected, result;
     for (int i = 0; i < intValues.length; i++) {
-      expected =
-          new java.sql.Date(DateWritableV2.daysToMillis(intValues[i]) + TIME_DIFFERENCE).toString();
+      expected = formatter.format(new java.sql.Date(DateWritableV2.daysToMillis(intValues[i])));
       byte[] subbyte = Arrays.copyOfRange(resultV.vector[i], resultV.start[i],
           resultV.start[i] + resultV.length[i]);
       result = new String(subbyte, StandardCharsets.UTF_8);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
index eb104bd..1b5fd42 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFAddMonths.java
@@ -66,6 +66,8 @@ public class TestGenericUDFAddMonths {
     runAndVerify("2016-02-29", -12, "2015-02-28", udf);
     runAndVerify("2016-01-29", 1, "2016-02-29", udf);
     runAndVerify("2016-02-29", -1, "2016-01-31", udf);
+    runAndVerify("1001-10-05", 1, "1001-11-05", udf);
+    runAndVerify("1582-10-05", 1, "1582-11-05", udf);
 
     // ts str
     runAndVerify("2014-01-14 10:30:00", 1, "2014-02-14", udf);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
index f0a5d3f..246fbd3 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFDateFormat.java
@@ -45,7 +45,7 @@ public class TestGenericUDFDateFormat {
     Text fmtText = new Text("EEEE");
     ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
         .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
-    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    ObjectInspector[] arguments = {valueOI0, valueOI1};
 
     udf.initialize(arguments);
 
@@ -157,6 +157,19 @@ public class TestGenericUDFDateFormat {
     runAndVerifyStr("2015-04-05", fmtText, null, udf);
   }
 
+
+  @Test
+  public void testJulianDates() throws HiveException {
+    GenericUDFDateFormat udf = new GenericUDFDateFormat();
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    Text fmtText = new Text("dd---MM--yyyy");
+    ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory
+            .getPrimitiveWritableConstantObjectInspector(TypeInfoFactory.stringTypeInfo, fmtText);
+    ObjectInspector[] arguments = { valueOI0, valueOI1 };
+    udf.initialize(arguments);
+    runAndVerifyStr("1001-01-05", fmtText, "05---01--1001", udf);
+  }
+
   private void runAndVerifyStr(String str, Text fmtText, String expResult, GenericUDF udf)
       throws HiveException {
     DeferredObject valueObj0 = new DeferredJavaObject(str != null ? new Text(str) : null);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
index 707e841..e386cfa 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFMonthsBetween.java
@@ -131,6 +131,9 @@ public class TestGenericUDFMonthsBetween {
     runTestStr(null, null, null, udf);
 
     runTestStr("2003-04-23", "2002-04-24", 11.96774194, udf);
+
+    //Test for Julian vs Gregorian dates
+    runTestStr("1582-10-05", "1582-11-05", -1., udf);
   }
 
 
diff --git a/ql/src/test/queries/clientpositive/udf_day.q b/ql/src/test/queries/clientpositive/udf_day.q
index fb9b57d..4b2ad42 100644
--- a/ql/src/test/queries/clientpositive/udf_day.q
+++ b/ql/src/test/queries/clientpositive/udf_day.q
@@ -1,2 +1,5 @@
 DESCRIBE FUNCTION day;
 DESCRIBE FUNCTION EXTENDED day;
+
+select month('2005-01-02');
+select month('1001-01-05');
diff --git a/ql/src/test/queries/clientpositive/udf_month.q b/ql/src/test/queries/clientpositive/udf_month.q
index 0a582ba..2b4363e 100644
--- a/ql/src/test/queries/clientpositive/udf_month.q
+++ b/ql/src/test/queries/clientpositive/udf_month.q
@@ -1,2 +1,5 @@
-DESCRIBE FUNCTION minute;
-DESCRIBE FUNCTION EXTENDED minute;
+DESCRIBE FUNCTION month;
+DESCRIBE FUNCTION EXTENDED month;
+
+select month('2005-01-02');
+select month('1001-01-05');
diff --git a/ql/src/test/queries/clientpositive/udf_weekofyear.q b/ql/src/test/queries/clientpositive/udf_weekofyear.q
index 18ab828..0122e68 100644
--- a/ql/src/test/queries/clientpositive/udf_weekofyear.q
+++ b/ql/src/test/queries/clientpositive/udf_weekofyear.q
@@ -7,3 +7,7 @@ DESCRIBE FUNCTION EXTENDED weekofyear;
 SELECT weekofyear('1980-01-01'), weekofyear('1980-01-06'), weekofyear('1980-01-07'), weekofyear('1980-12-31'),
 weekofyear('1984-1-1'), weekofyear('2008-02-20 00:00:00'), weekofyear('1980-12-28 23:59:59'), weekofyear('1980-12-29 23:59:59')
 FROM src tablesample (1 rows);
+
+SELECT weekofyear('1007-01-01 10:13:14');
+SELECT weekofyear('1008-01-01 10:13:14');
+SELECT weekofyear('1009-01-01 10:13:14');
diff --git a/ql/src/test/queries/clientpositive/udf_year.q b/ql/src/test/queries/clientpositive/udf_year.q
new file mode 100644
index 0000000..23d7313
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/udf_year.q
@@ -0,0 +1,5 @@
+DESCRIBE FUNCTION year;
+DESCRIBE FUNCTION EXTENDED year;
+
+select year('2005-01-02');
+select year('1001-01-05');
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
index 38b0537..fbbf094 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp_funcs.q.out
@@ -415,7 +415,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	NULL	1969-12-31 15:59:55.451
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	NULL	1969-12-31 15:59:58.174
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	NULL	1969-12-31 15:59:58.456
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+-45479202281	528	10	27	27	44	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
 1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
@@ -614,7 +614,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
--62018199211	4	9	24	22	39	18	26	29
+-62018199211	4	9	22	22	39	18	26	29
 1365554626	2013	4	10	10	15	0	43	46
 206730996125	8521	1	16	16	3	20	42	5
 271176065	1978	8	5	5	31	14	41	5
@@ -969,7 +969,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	4	40	39
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
diff --git a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
index 5671a7c..b58d008 100644
--- a/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorized_timestamp_funcs.q.out
@@ -413,7 +413,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	NULL	1969-12-31 15:59:55.451
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	NULL	1969-12-31 15:59:58.174
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	NULL	1969-12-31 15:59:58.456
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+-45479202281	528	10	27	27	44	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
 1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
@@ -610,7 +610,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
--62018199211	4	9	24	22	39	18	26	29
+-62018199211	4	9	22	22	39	18	26	29
 1365554626	2013	4	10	10	15	0	43	46
 206730996125	8521	1	16	16	3	20	42	5
 271176065	1978	8	5	5	31	14	41	5
@@ -961,7 +961,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	4	40	39
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT
diff --git a/ql/src/test/results/clientpositive/udf_day.q.out b/ql/src/test/results/clientpositive/udf_day.q.out
index 48e8161..8e12825 100644
--- a/ql/src/test/results/clientpositive/udf_day.q.out
+++ b/ql/src/test/results/clientpositive/udf_day.q.out
@@ -18,3 +18,21 @@ param can be one of:
   30
 Function class:org.apache.hadoop.hive.ql.udf.UDFDayOfMonth
 Function type:BUILTIN
+PREHOOK: query: select month('2005-01-02')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select month('2005-01-02')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1
+PREHOOK: query: select month('1001-01-05')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select month('1001-01-05')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1
diff --git a/ql/src/test/results/clientpositive/udf_month.q.out b/ql/src/test/results/clientpositive/udf_month.q.out
index c07c98f..068f2ff 100644
--- a/ql/src/test/results/clientpositive/udf_month.q.out
+++ b/ql/src/test/results/clientpositive/udf_month.q.out
@@ -1,20 +1,37 @@
-PREHOOK: query: DESCRIBE FUNCTION minute
+PREHOOK: query: DESCRIBE FUNCTION month
 PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: DESCRIBE FUNCTION minute
+POSTHOOK: query: DESCRIBE FUNCTION month
 POSTHOOK: type: DESCFUNCTION
-minute(param) - Returns the minute component of the string/timestamp/interval
-PREHOOK: query: DESCRIBE FUNCTION EXTENDED minute
+month(param) - Returns the month component of the date/timestamp/interval
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED month
 PREHOOK: type: DESCFUNCTION
-POSTHOOK: query: DESCRIBE FUNCTION EXTENDED minute
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED month
 POSTHOOK: type: DESCFUNCTION
-minute(param) - Returns the minute component of the string/timestamp/interval
+month(param) - Returns the month component of the date/timestamp/interval
 param can be one of:
-1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'HH:mm:ss'.
-2. A timestamp value
-3. A day-time interval valueExample:
-   > SELECT minute('2009-07-30 12:58:59') FROM src LIMIT 1;
-  58
-  > SELECT minute('12:58:59') FROM src LIMIT 1;
-  58
-Function class:org.apache.hadoop.hive.ql.udf.UDFMinute
+1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'.
+2. A date value
+3. A timestamp value
+4. A year-month interval valueExample:
+  > SELECT month('2009-07-30') FROM src LIMIT 1;
+  7
+Function class:org.apache.hadoop.hive.ql.udf.UDFMonth
 Function type:BUILTIN
+PREHOOK: query: select month('2005-01-02')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select month('2005-01-02')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1
+PREHOOK: query: select month('1001-01-05')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select month('1001-01-05')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1
diff --git a/ql/src/test/results/clientpositive/udf_weekofyear.q.out b/ql/src/test/results/clientpositive/udf_weekofyear.q.out
index 6e6c661..f003af05 100644
--- a/ql/src/test/results/clientpositive/udf_weekofyear.q.out
+++ b/ql/src/test/results/clientpositive/udf_weekofyear.q.out
@@ -28,3 +28,30 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 1	1	2	1	52	8	52	1
+PREHOOK: query: SELECT weekofyear('1007-01-01 10:13:14')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT weekofyear('1007-01-01 10:13:14')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1
+PREHOOK: query: SELECT weekofyear('1008-01-01 10:13:14')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT weekofyear('1008-01-01 10:13:14')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+53
+PREHOOK: query: SELECT weekofyear('1009-01-01 10:13:14')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT weekofyear('1009-01-01 10:13:14')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+52
diff --git a/ql/src/test/results/clientpositive/udf_year.q.out b/ql/src/test/results/clientpositive/udf_year.q.out
new file mode 100644
index 0000000..49ca056
--- /dev/null
+++ b/ql/src/test/results/clientpositive/udf_year.q.out
@@ -0,0 +1,37 @@
+PREHOOK: query: DESCRIBE FUNCTION year
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION year
+POSTHOOK: type: DESCFUNCTION
+year(param) - Returns the year component of the date/timestamp/interval
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED year
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED year
+POSTHOOK: type: DESCFUNCTION
+year(param) - Returns the year component of the date/timestamp/interval
+param can be one of:
+1. A string in the format of 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'.
+2. A date value
+3. A timestamp value
+4. A year-month interval valueExample:
+   > SELECT year('2009-07-30') FROM src LIMIT 1;
+  2009
+Function class:org.apache.hadoop.hive.ql.udf.UDFYear
+Function type:BUILTIN
+PREHOOK: query: select year('2005-01-02')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select year('2005-01-02')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+2005
+PREHOOK: query: select year('1001-01-05')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select year('1001-01-05')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1001
diff --git a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
index 9a42dc0..8190599 100644
--- a/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_timestamp_funcs.q.out
@@ -398,7 +398,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:55.451	NULL	1969-12-31 15:59:55.451
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.174	NULL	1969-12-31 15:59:58.174
 -29	1969	12	31	31	1	23	59	30	NULL	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	1319-02-02 16:31:57.778	1969-12-31 23:59:30.929	1969-12-31 15:59:58.456	NULL	1969-12-31 15:59:58.456
--45479202281	528	10	25	25	43	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
+-45479202281	528	10	27	27	44	8	15	18	true	0528-10-27 08:15:18.941718273	NULL	0528-10-27 08:15:18.941718273	2000-12-18 08:42:30.0005	0528-10-27 08:15:18.941718273	0528-10-27 08:15:18.941718273	NULL
 1632453512	2021	9	24	24	38	3	18	32	NULL	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1974-10-04 17:21:03.989	NULL	1974-10-04 17:21:03.989
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	1999-10-03 16:59:10.396903939	NULL	1999-10-03 16:59:10.396903939
 1632453512	2021	9	24	24	38	3	18	32	false	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	1319-02-02 16:31:57.778	2021-09-24 03:18:32.4	2010-04-08 02:43:35.861742727	NULL	2010-04-08 02:43:35.861742727
@@ -580,7 +580,7 @@ POSTHOOK: Input: default@alltypesorc_string
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
 -30	1969	12	31	31	1	23	59	30
--62018199211	4	9	24	22	39	18	26	29
+-62018199211	4	9	22	22	39	18	26	29
 1365554626	2013	4	10	10	15	0	43	46
 206730996125	8521	1	16	16	3	20	42	5
 271176065	1978	8	5	5	31	14	41	5
@@ -901,7 +901,7 @@ ORDER BY c1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@alltypesorc_wrong
 #### A masked pattern was here ####
-NULL	2	12	2	NULL	49	4	40	39
+NULL	2	11	30	NULL	48	4	40	39
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL	NULL
 PREHOOK: query: EXPLAIN VECTORIZATION EXPRESSION  SELECT