You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jd...@apache.org on 2015/01/26 20:24:15 UTC
svn commit: r1654878 - in /hive/trunk/ql/src:
java/org/apache/hadoop/hive/ql/udf/generic/
test/org/apache/hadoop/hive/ql/udf/ test/queries/clientpositive/
test/results/clientpositive/
Author: jdere
Date: Mon Jan 26 19:24:14 2015
New Revision: 1654878
URL: http://svn.apache.org/r1654878
Log:
HIVE-9396: date_add()/date_sub() should allow tinyint/smallint/bigint arguments in addition to int (Sergio Peña via Jason Dere)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java
hive/trunk/ql/src/test/queries/clientpositive/udf_date_add.q
hive/trunk/ql/src/test/queries/clientpositive/udf_date_sub.q
hive/trunk/ql/src/test/results/clientpositive/udf_date_add.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_date_sub.q.out
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateAdd.java Mon Jan 26 19:24:14 2015
@@ -32,7 +32,9 @@ import org.apache.hadoop.hive.ql.exec.ve
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddColScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateAddScalarCol;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -66,10 +68,8 @@ import org.apache.hadoop.io.IntWritable;
@VectorizedExpressions({VectorUDFDateAddColScalar.class, VectorUDFDateAddScalarCol.class, VectorUDFDateAddColCol.class})
public class GenericUDFDateAdd extends GenericUDF {
private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private transient TimestampConverter timestampConverter;
- private transient Converter textConverter;
- private transient Converter dateWritableConverter;
- private transient Converter intWritableConverter;
+ private transient Converter dateConverter;
+ private transient Converter daysConverter;
private transient PrimitiveCategory inputType1;
private transient PrimitiveCategory inputType2;
private final Calendar calendar = Calendar.getInstance();
@@ -99,16 +99,16 @@ public class GenericUDFDateAdd extends G
case VARCHAR:
case CHAR:
inputType1 = PrimitiveCategory.STRING;
- textConverter = ObjectInspectorConverters.getConverter(
+ dateConverter = ObjectInspectorConverters.getConverter(
(PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableStringObjectInspector);
break;
case TIMESTAMP:
- timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
+ dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
case DATE:
- dateWritableConverter = ObjectInspectorConverters.getConverter(
+ dateConverter = ObjectInspectorConverters.getConverter(
(PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableDateObjectInspector);
break;
@@ -119,52 +119,76 @@ public class GenericUDFDateAdd extends G
}
inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
- if (inputType2 != PrimitiveCategory.INT) {
- throw new UDFArgumentException(
- " DATE_ADD() only takes INT types as second argument, got " + inputType2);
+ switch (inputType2) {
+ case BYTE:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableByteObjectInspector);
+ break;
+ case SHORT:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableShortObjectInspector);
+ break;
+ case INT:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ break;
+ default:
+ throw new UDFArgumentException(
+ " DATE_ADD() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
}
- intWritableConverter = ObjectInspectorConverters.getConverter(
- (PrimitiveObjectInspector) arguments[1],
- PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+
return outputOI;
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
-
if (arguments[0].get() == null) {
return null;
}
- IntWritable toBeAdded = (IntWritable) intWritableConverter.convert(arguments[1].get());
- if (toBeAdded == null) {
+
+ Object daysWritableObject = daysConverter.convert(arguments[1].get());
+ if (daysWritableObject == null) {
+ return null;
+ }
+
+ int toBeAdded;
+ if (daysWritableObject instanceof ByteWritable) {
+ toBeAdded = ((ByteWritable) daysWritableObject).get();
+ } else if (daysWritableObject instanceof ShortWritable) {
+ toBeAdded = ((ShortWritable) daysWritableObject).get();
+ } else if (daysWritableObject instanceof IntWritable) {
+ toBeAdded = ((IntWritable) daysWritableObject).get();
+ } else {
return null;
}
switch (inputType1) {
case STRING:
- String dateString = textConverter.convert(arguments[0].get()).toString();
+ String dateString = dateConverter.convert(arguments[0].get()).toString();
try {
calendar.setTime(formatter.parse(dateString.toString()));
- calendar.add(Calendar.DAY_OF_MONTH, toBeAdded.get());
} catch (ParseException e) {
return null;
}
break;
case TIMESTAMP:
- Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
+ Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
.getTimestamp();
calendar.setTime(ts);
- calendar.add(Calendar.DAY_OF_MONTH, toBeAdded.get());
break;
case DATE:
- DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
+ DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
calendar.setTime(dw.get());
- calendar.add(Calendar.DAY_OF_MONTH, toBeAdded.get());
break;
default:
throw new UDFArgumentException(
"DATE_ADD() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
+
+ calendar.add(Calendar.DAY_OF_MONTH, toBeAdded);
Date newDate = calendar.getTime();
output.set(formatter.format(newDate));
return output;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFDateSub.java Mon Jan 26 19:24:14 2015
@@ -32,7 +32,9 @@ import org.apache.hadoop.hive.ql.exec.ve
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubColScalar;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorUDFDateSubScalarCol;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
@@ -66,10 +68,8 @@ import org.apache.hadoop.io.IntWritable;
@VectorizedExpressions({VectorUDFDateSubColScalar.class, VectorUDFDateSubScalarCol.class, VectorUDFDateSubColCol.class})
public class GenericUDFDateSub extends GenericUDF {
private transient SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
- private transient TimestampConverter timestampConverter;
- private transient Converter textConverter;
- private transient Converter dateWritableConverter;
- private transient Converter intWritableConverter;
+ private transient Converter dateConverter;
+ private transient Converter daysConverter;
private transient PrimitiveCategory inputType1;
private transient PrimitiveCategory inputType2;
private final Calendar calendar = Calendar.getInstance();
@@ -99,16 +99,16 @@ public class GenericUDFDateSub extends G
case VARCHAR:
case CHAR:
inputType1 = PrimitiveCategory.STRING;
- textConverter = ObjectInspectorConverters.getConverter(
+ dateConverter = ObjectInspectorConverters.getConverter(
(PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableStringObjectInspector);
break;
case TIMESTAMP:
- timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
+ dateConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
break;
case DATE:
- dateWritableConverter = ObjectInspectorConverters.getConverter(
+ dateConverter = ObjectInspectorConverters.getConverter(
(PrimitiveObjectInspector) arguments[0],
PrimitiveObjectInspectorFactory.writableDateObjectInspector);
break;
@@ -119,52 +119,76 @@ public class GenericUDFDateSub extends G
}
inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
- if (inputType2 != PrimitiveCategory.INT) {
- throw new UDFArgumentException(
- " DATE_SUB() only takes INT types as second argument, got " + inputType2);
+ switch (inputType2) {
+ case BYTE:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableByteObjectInspector);
+ break;
+ case SHORT:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableShortObjectInspector);
+ break;
+ case INT:
+ daysConverter = ObjectInspectorConverters.getConverter(
+ (PrimitiveObjectInspector) arguments[1],
+ PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+ break;
+ default:
+ throw new UDFArgumentException(
+          " DATE_SUB() only takes TINYINT/SMALLINT/INT types as second argument, got " + inputType2);
}
- intWritableConverter = ObjectInspectorConverters.getConverter(
- (PrimitiveObjectInspector) arguments[1],
- PrimitiveObjectInspectorFactory.writableIntObjectInspector);
+
return outputOI;
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
-
if (arguments[0].get() == null) {
return null;
}
- IntWritable toBeSubed = (IntWritable) intWritableConverter.convert(arguments[1].get());
- if (toBeSubed == null) {
+
+ Object daysWritableObject = daysConverter.convert(arguments[1].get());
+ if (daysWritableObject == null) {
+ return null;
+ }
+
+ int toBeSubed;
+ if (daysWritableObject instanceof ByteWritable) {
+ toBeSubed = ((ByteWritable) daysWritableObject).get();
+ } else if (daysWritableObject instanceof ShortWritable) {
+ toBeSubed = ((ShortWritable) daysWritableObject).get();
+ } else if (daysWritableObject instanceof IntWritable) {
+ toBeSubed = ((IntWritable) daysWritableObject).get();
+ } else {
return null;
}
switch (inputType1) {
case STRING:
- String dateString = textConverter.convert(arguments[0].get()).toString();
+ String dateString = dateConverter.convert(arguments[0].get()).toString();
try {
calendar.setTime(formatter.parse(dateString.toString()));
- calendar.add(Calendar.DAY_OF_MONTH, -toBeSubed.get());
} catch (ParseException e) {
return null;
}
break;
case TIMESTAMP:
- Timestamp ts = ((TimestampWritable) timestampConverter.convert(arguments[0].get()))
+ Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
.getTimestamp();
calendar.setTime(ts);
- calendar.add(Calendar.DAY_OF_MONTH, -toBeSubed.get());
break;
case DATE:
- DateWritable dw = (DateWritable) dateWritableConverter.convert(arguments[0].get());
+ DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
calendar.setTime(dw.get());
- calendar.add(Calendar.DAY_OF_MONTH, -toBeSubed.get());
break;
default:
throw new UDFArgumentException(
"DATE_SUB() only takes STRING/TIMESTAMP/DATEWRITABLE types, got " + inputType1);
}
+
+ calendar.add(Calendar.DAY_OF_MONTH, -toBeSubed);
Date newDate = calendar.getTime();
output.set(formatter.format(newDate));
return output;
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateAdd.java Mon Jan 26 19:24:14 2015
@@ -23,6 +23,8 @@ import java.sql.Timestamp;
import junit.framework.TestCase;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
@@ -111,4 +113,33 @@ public class TestGenericUDFDateAdd exten
assertNull("date_add() both args null", udf.evaluate(args));
}
+ public void testByteDataTypeAsDays() throws HiveException {
+ GenericUDFDateAdd udf = new GenericUDFDateAdd();
+ ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+ ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaByteObjectInspector;
+ ObjectInspector[] arguments = {valueOI1, valueOI2};
+
+ udf.initialize(arguments);
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
+ DeferredObject[] args = {valueObj1, valueObj2};
+ Text output = (Text) udf.evaluate(args);
+
+ assertEquals("date_add() test for BYTE failed ", "2009-07-24", output.toString());
+ }
+
+ public void testShortDataTypeAsDays() throws HiveException {
+ GenericUDFDateAdd udf = new GenericUDFDateAdd();
+ ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+ ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaShortObjectInspector;
+ ObjectInspector[] arguments = {valueOI1, valueOI2};
+
+ udf.initialize(arguments);
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
+ DeferredObject[] args = {valueObj1, valueObj2};
+ Text output = (Text) udf.evaluate(args);
+
+ assertEquals("date_add() test for SHORT failed ", "2009-07-24", output.toString());
+ }
}
Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/TestGenericUDFDateSub.java Mon Jan 26 19:24:14 2015
@@ -111,4 +111,33 @@ public class TestGenericUDFDateSub exten
assertNull("date_add() both args null", udf.evaluate(args));
}
+ public void testByteDataTypeAsDays() throws HiveException {
+ GenericUDFDateSub udf = new GenericUDFDateSub();
+ ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+ ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaByteObjectInspector;
+ ObjectInspector[] arguments = {valueOI1, valueOI2};
+
+ udf.initialize(arguments);
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj2 = new DeferredJavaObject(new Byte("4"));
+ DeferredObject[] args = {valueObj1, valueObj2};
+ Text output = (Text) udf.evaluate(args);
+
+    assertEquals("date_sub() test for BYTE failed ", "2009-07-16", output.toString());
+ }
+
+ public void testShortDataTypeAsDays() throws HiveException {
+ GenericUDFDateSub udf = new GenericUDFDateSub();
+ ObjectInspector valueOI1 = PrimitiveObjectInspectorFactory.writableDateObjectInspector;
+ ObjectInspector valueOI2 = PrimitiveObjectInspectorFactory.javaShortObjectInspector;
+ ObjectInspector[] arguments = {valueOI1, valueOI2};
+
+ udf.initialize(arguments);
+ DeferredObject valueObj1 = new DeferredJavaObject(new DateWritable(new Date(109, 06, 20)));
+ DeferredObject valueObj2 = new DeferredJavaObject(new Short("4"));
+ DeferredObject[] args = {valueObj1, valueObj2};
+ Text output = (Text) udf.evaluate(args);
+
+    assertEquals("date_sub() test for SHORT failed ", "2009-07-16", output.toString());
+ }
}
Modified: hive/trunk/ql/src/test/queries/clientpositive/udf_date_add.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udf_date_add.q?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udf_date_add.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/udf_date_add.q Mon Jan 26 19:24:14 2015
@@ -1,2 +1,7 @@
DESCRIBE FUNCTION date_add;
DESCRIBE FUNCTION EXTENDED date_add;
+
+-- Test different numeric data types for date_add
+SELECT date_add('1900-01-01', cast(10 as tinyint)),
+ date_add('1900-01-01', cast(10 as smallint)),
+ date_add('1900-01-01', cast(10 as int));
\ No newline at end of file
Modified: hive/trunk/ql/src/test/queries/clientpositive/udf_date_sub.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udf_date_sub.q?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udf_date_sub.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/udf_date_sub.q Mon Jan 26 19:24:14 2015
@@ -1,2 +1,7 @@
DESCRIBE FUNCTION date_sub;
DESCRIBE FUNCTION EXTENDED date_sub;
+
+-- Test different numeric data types for date_add
+SELECT date_sub('1900-01-01', cast(10 as tinyint)),
+ date_sub('1900-01-01', cast(10 as smallint)),
+ date_sub('1900-01-01', cast(10 as int));
\ No newline at end of file
Modified: hive/trunk/ql/src/test/results/clientpositive/udf_date_add.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_date_add.q.out?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_date_add.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_date_add.q.out Mon Jan 26 19:24:14 2015
@@ -12,3 +12,18 @@ start_date is a string in the format 'yy
Example:
> SELECT date_add('2009-07-30', 1) FROM src LIMIT 1;
'2009-07-31'
+PREHOOK: query: -- Test different numeric data types for date_add
+SELECT date_add('1900-01-01', cast(10 as tinyint)),
+ date_add('1900-01-01', cast(10 as smallint)),
+ date_add('1900-01-01', cast(10 as int))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test different numeric data types for date_add
+SELECT date_add('1900-01-01', cast(10 as tinyint)),
+ date_add('1900-01-01', cast(10 as smallint)),
+ date_add('1900-01-01', cast(10 as int))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1900-01-11 1900-01-11 1900-01-11
Modified: hive/trunk/ql/src/test/results/clientpositive/udf_date_sub.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_date_sub.q.out?rev=1654878&r1=1654877&r2=1654878&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_date_sub.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_date_sub.q.out Mon Jan 26 19:24:14 2015
@@ -12,3 +12,18 @@ start_date is a string in the format 'yy
Example:
> SELECT date_sub('2009-07-30', 1) FROM src LIMIT 1;
'2009-07-29'
+PREHOOK: query: -- Test different numeric data types for date_add
+SELECT date_sub('1900-01-01', cast(10 as tinyint)),
+ date_sub('1900-01-01', cast(10 as smallint)),
+ date_sub('1900-01-01', cast(10 as int))
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test different numeric data types for date_add
+SELECT date_sub('1900-01-01', cast(10 as tinyint)),
+ date_sub('1900-01-01', cast(10 as smallint)),
+ date_sub('1900-01-01', cast(10 as int))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+1899-12-22 1899-12-22 1899-12-22