You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by na...@apache.org on 2012/01/12 21:26:20 UTC
svn commit: r1230758 - in /hive/trunk/ql/src:
java/org/apache/hadoop/hive/ql/exec/
java/org/apache/hadoop/hive/ql/udf/generic/ test/queries/clientnegative/
test/queries/clientpositive/ test/results/clientnegative/
test/results/clientpositive/
Author: namit
Date: Thu Jan 12 20:26:20 2012
New Revision: 1230758
URL: http://svn.apache.org/viewvc?rev=1230758&view=rev
Log:
HIVE-2695 Add PRINTF() Udf (Zhenxiao Luo via namit)
Added:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java
hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong1.q
hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong2.q
hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong3.q
hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong4.q
hive/trunk/ql/src/test/queries/clientpositive/udf_printf.q
hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong1.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong2.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong3.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong4.q.out
hive/trunk/ql/src/test/results/clientpositive/udf_printf.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1230758&r1=1230757&r2=1230758&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Thu Jan 12 20:26:20 2012
@@ -185,6 +185,7 @@ import org.apache.hadoop.hive.ql.udf.gen
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFPrintf;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFReflect;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSentences;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFSize;
@@ -449,6 +450,7 @@ public final class FunctionRegistry {
registerGenericUDF("sentences", GenericUDFSentences.class);
registerGenericUDF("map_keys", GenericUDFMapKeys.class);
registerGenericUDF("map_values", GenericUDFMapValues.class);
+ registerGenericUDF("printf", GenericUDFPrintf.class);
registerGenericUDF("from_utc_timestamp", GenericUDFFromUtcTimestamp.class);
registerGenericUDF("to_utc_timestamp", GenericUDFToUtcTimestamp.class);
Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFPrintf.java Thu Jan 12 20:26:20 2012
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
+import org.apache.hadoop.io.Text;
+
+import java.util.Formatter;
+import java.util.Locale;
+import java.util.ArrayList;
+
+/**
+ * Generic UDF for printf function
+ * <code>printf(String format, Obj... args)</code>.
+ *
+ * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF
+ */
+@Description(name = "printf",
+ value = "_FUNC_(String format, Obj... args) - "
+ + "function that can format strings according to printf-style format strings",
+ extended = "Example:\n"
+ + " > SELECT _FUNC_(\"Hello World %d %s\", 100, \"days\")"
+ + "FROM src LIMIT 1;\n"
+ + " \"Hello World 100 days\"")
+public class GenericUDFPrintf extends GenericUDF {
+ // Object inspectors for all arguments, captured in initialize() and
+ // reused in evaluate() to unwrap each deferred value.
+ private ObjectInspector[] argumentOIs;
+ // Reusable output holder so evaluate() does not allocate a new Text per row.
+ private final Text resultText = new Text();
+
+ @Override
+ // Validates the argument list and fixes the return type.
+ // Argument 1 must be a string (or void, i.e. a NULL literal) format
+ // pattern; every later argument must be of primitive category.
+ // Returns a writable-string object inspector for the formatted result.
+ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ if (arguments.length < 1) {
+ throw new UDFArgumentLengthException(
+ "The function PRINTF(String format, Obj... args) needs at least one arguments.");
+ }
+
+ // NOTE(review): type names are compared with ==/!= (reference identity)
+ // rather than equals(); this relies on getTypeName() returning the same
+ // interned constant as serde Constants -- confirm, or switch to equals().
+ if (arguments[0].getTypeName() != Constants.STRING_TYPE_NAME
+ && arguments[0].getTypeName() != Constants.VOID_TYPE_NAME) {
+ throw new UDFArgumentTypeException(0, "Argument 1"
+ + " of function PRINTF must be \"" + Constants.STRING_TYPE_NAME
+ + "\", but \"" + arguments[0].getTypeName() + "\" was found.");
+ }
+
+ // Reject non-primitive varargs (arrays, maps, structs) up front so the
+ // error surfaces at compile/semantic-analysis time, not per row.
+ for (int i = 1; i < arguments.length; i++) {
+ if (!arguments[i].getCategory().equals(Category.PRIMITIVE)){
+ throw new UDFArgumentTypeException(i, "Argument " + (i + 1)
+ + " of function PRINTF must be \"" + Category.PRIMITIVE
+ + "\", but \"" + arguments[i].getTypeName() + "\" was found.");
+ }
+ }
+
+ argumentOIs = arguments;
+ return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+ }
+
+ @Override
+ // Formats the row: unwraps each argument to a plain Java value and
+ // delegates the actual printf-style conversion to java.util.Formatter.
+ public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ StringBuilder sb = new StringBuilder();
+ // Locale.US keeps numeric formatting (decimal separator, grouping)
+ // stable regardless of the JVM's default locale.
+ Formatter formatter = new Formatter(sb, Locale.US);
+
+ // NOTE(review): if the format expression evaluates to SQL NULL, pattern
+ // is null and Formatter.format(null, ...) throws NPE -- confirm callers
+ // cannot reach this with a null pattern, or guard here.
+ String pattern = ((StringObjectInspector) argumentOIs[0])
+ .getPrimitiveJavaObject(arguments[0].get());
+
+ // Unbox each remaining argument by its primitive category so Formatter
+ // sees ordinary Java values (boolean, int, String, ...).
+ // NOTE(review): raw ArrayList -- ArrayList<Object> would be cleaner.
+ ArrayList argumentList = new ArrayList();
+ for (int i = 1; i < arguments.length; i++) {
+ switch (((PrimitiveObjectInspector)argumentOIs[i]).getPrimitiveCategory()) {
+ case BOOLEAN:
+ argumentList.add(((BooleanObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case BYTE:
+ argumentList.add(((ByteObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case SHORT:
+ argumentList.add(((ShortObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case INT:
+ argumentList.add(((IntObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case LONG:
+ argumentList.add(((LongObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case FLOAT:
+ argumentList.add(((FloatObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case DOUBLE:
+ argumentList.add(((DoubleObjectInspector)argumentOIs[i]).get(arguments[i].get()));
+ break;
+ case STRING:
+ argumentList.add(((StringObjectInspector)argumentOIs[i])
+ .getPrimitiveJavaObject(arguments[i].get()));
+ break;
+ case TIMESTAMP:
+ argumentList.add(((TimestampObjectInspector)argumentOIs[i])
+ .getPrimitiveJavaObject(arguments[i].get()));
+ break;
+ case BINARY:
+ // Binary values pass through as the raw writable; its toString()
+ // supplies the representation used by %s.
+ argumentList.add(arguments[i].get());
+ break;
+ default:
+ // Any other primitive category (e.g. void/NULL literal) passes
+ // through as-is; Formatter renders null as "null".
+ argumentList.add(arguments[i].get());
+ break;
+ }
+ }
+ formatter.format(pattern, argumentList.toArray());
+
+ resultText.set(sb.toString());
+ return resultText;
+ }
+
+ @Override
+ // Renders the call for EXPLAIN plans, e.g. printf('fmt', a, b).
+ public String getDisplayString(String[] children) {
+ assert (children.length >= 2);
+ StringBuilder sb = new StringBuilder();
+ sb.append("printf(");
+ for (int i = 0; i < children.length - 1; i++) {
+ sb.append(children[i]).append(", ");
+ }
+ sb.append(children[children.length - 1]).append(")");
+ return sb.toString();
+ }
+}
Added: hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong1.q?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong1.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong1.q Thu Jan 12 20:26:20 2012
@@ -0,0 +1,2 @@
+-- invalid argument length
+SELECT printf() FROM src LIMIT 1;
Added: hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong2.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong2.q?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong2.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong2.q Thu Jan 12 20:26:20 2012
@@ -0,0 +1,2 @@
+-- invalid argument type
+SELECT printf(100) FROM src LIMIT 1;
Added: hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong3.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong3.q?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong3.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong3.q Thu Jan 12 20:26:20 2012
@@ -0,0 +1,2 @@
+-- invalid argument type
+SELECT printf("Hello World %s", array("invalid", "argument")) FROM src LIMIT 1;
Added: hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong4.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong4.q?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong4.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/udf_printf_wrong4.q Thu Jan 12 20:26:20 2012
@@ -0,0 +1,2 @@
+-- invalid argument type
+SELECT printf("Hello World %s", array("invalid", "argument")) FROM src LIMIT 1;
Added: hive/trunk/ql/src/test/queries/clientpositive/udf_printf.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/udf_printf.q?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/udf_printf.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/udf_printf.q Thu Jan 12 20:26:20 2012
@@ -0,0 +1,35 @@
+use default;
+-- Test printf() UDF
+
+DESCRIBE FUNCTION printf;
+DESCRIBE FUNCTION EXTENDED printf;
+
+EXPLAIN
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1;
+
+-- Test Primitive Types
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1;
+SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1;
+
+-- Test NULL Values
+SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1;
+
+-- Test Timestamp
+create table timestamp_udf (t timestamp);
+from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1;
+select printf("timestamp: %s", t) from timestamp_udf;
+drop table timestamp_udf;
+
+-- Test Binary
+CREATE TABLE binay_udf(key binary, value int)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '9'
+STORED AS TEXTFILE;
+LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf;
+create table dest1 (key binary, value int);
+insert overwrite table dest1 select transform(*) using 'cat' as key binary, value int from binay_udf;
+select value, printf("format key: %s", key) from dest1;
+drop table dest1;
+drop table binary_udf;
Added: hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong1.q.out?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong1.q.out Thu Jan 12 20:26:20 2012
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 2:7 Arguments length mismatch 'printf': The function PRINTF(String format, Obj... args) needs at least one arguments.
Added: hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong2.q.out?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong2.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong2.q.out Thu Jan 12 20:26:20 2012
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 2:14 Argument type mismatch '100': Argument 1 of function PRINTF must be "string", but "int" was found.
Added: hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong3.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong3.q.out?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong3.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong3.q.out Thu Jan 12 20:26:20 2012
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array<string>" was found.
Added: hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong4.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong4.q.out?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong4.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/udf_printf_wrong4.q.out Thu Jan 12 20:26:20 2012
@@ -0,0 +1 @@
+FAILED: Error in semantic analysis: Line 2:32 Argument type mismatch '"argument"': Argument 2 of function PRINTF must be "PRIMITIVE", but "array<string>" was found.
Modified: hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out?rev=1230758&r1=1230757&r2=1230758&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out Thu Jan 12 20:26:20 2012
@@ -111,6 +111,7 @@ pmod
positive
pow
power
+printf
radians
rand
reflect
Added: hive/trunk/ql/src/test/results/clientpositive/udf_printf.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/udf_printf.q.out?rev=1230758&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/udf_printf.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/udf_printf.q.out Thu Jan 12 20:26:20 2012
@@ -0,0 +1,206 @@
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: -- Test printf() UDF
+
+DESCRIBE FUNCTION printf
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: -- Test printf() UDF
+
+DESCRIBE FUNCTION printf
+POSTHOOK: type: DESCFUNCTION
+printf(String format, Obj... args) - function that can format strings according to printf-style format strings
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED printf
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED printf
+POSTHOOK: type: DESCFUNCTION
+printf(String format, Obj... args) - function that can format strings according to printf-style format strings
+Example:
+ > SELECT printf("Hello World %d %s", 100, "days")FROM src LIMIT 1;
+ "Hello World 100 days"
+PREHOOK: query: EXPLAIN
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION printf "Hello World %d %s" 100 "days"))) (TOK_LIMIT 1)))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ src
+ TableScan
+ alias: src
+ Select Operator
+ expressions:
+ expr: printf('Hello World %d %s', 100, 'days')
+ type: string
+ outputColumnNames: _col0
+ Limit
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: 1
+
+
+PREHOOK: query: -- Test Primitive Types
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test Primitive Types
+SELECT printf("Hello World %d %s", 100, "days") FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+Hello World 100 days
+PREHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT printf("All Type Test: %b, %c, %d, %e, %+10.4f, %g, %h, %s, %a", false, 65, 15000, 12.3400, 27183.240051, 2300.41, 50, "corret", 256.125) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+All Type Test: false, A, 15000, 1.234000e+01, +27183.2401, 2300.41, 32, corret, 0x1.002p8
+PREHOOK: query: -- Test NULL Values
+SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test NULL Values
+SELECT printf("Color %s, String Null: %s, number1 %d, number2 %05d, Integer Null: %d, hex %#x, float %5.2f Double Null: %f\n", "red", NULL, 123456, 89, NULL, 255, 3.14159, NULL) FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+Color red, String Null: null, number1 123456, number2 00089, Integer Null: null, hex 0xff, float 3.14 Double Null: null
+PREHOOK: query: -- Test Timestamp
+create table timestamp_udf (t timestamp)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test Timestamp
+create table timestamp_udf (t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@timestamp_udf
+PREHOOK: query: from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@timestamp_udf
+POSTHOOK: query: from src
+ insert overwrite table timestamp_udf
+ select '2011-05-06 07:08:09.1234567' limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: select printf("timestamp: %s", t) from timestamp_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@timestamp_udf
+#### A masked pattern was here ####
+POSTHOOK: query: select printf("timestamp: %s", t) from timestamp_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@timestamp_udf
+#### A masked pattern was here ####
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+timestamp: 2011-05-06 07:08:09.1234567
+PREHOOK: query: drop table timestamp_udf
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@timestamp_udf
+PREHOOK: Output: default@timestamp_udf
+POSTHOOK: query: drop table timestamp_udf
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@timestamp_udf
+POSTHOOK: Output: default@timestamp_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: -- Test Binary
+CREATE TABLE binay_udf(key binary, value int)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '9'
+STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test Binary
+CREATE TABLE binay_udf(key binary, value int)
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY '9'
+STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@binay_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf
+PREHOOK: type: LOAD
+PREHOOK: Output: default@binay_udf
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/string.txt' INTO TABLE binay_udf
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@binay_udf
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: create table dest1 (key binary, value int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table dest1 (key binary, value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: insert overwrite table dest1 select transform(*) using 'cat' as key binary, value int from binay_udf
+PREHOOK: type: QUERY
+PREHOOK: Input: default@binay_udf
+PREHOOK: Output: default@dest1
+POSTHOOK: query: insert overwrite table dest1 select transform(*) using 'cat' as key binary, value int from binay_udf
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@binay_udf
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: select value, printf("format key: %s", key) from dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: select value, printf("format key: %s", key) from dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: Lineage: dest1.key SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+1 format key: 61 00 62 63 01 02 01 00
+2 format key: 00 74 65 73 74 00
+3 format key: 01 74 65 73 74 01
+4 format key: 74 65 73 74 00 74 65 73 74
+5 format key: 74 65 73 74 01 74 65 73 74
+6 format key: 74 65 73 74 00 00 01 01 74 65 73 74
+7 format key: 00 00 00
+8 format key: 01 01 01
+9 format key: 00 01 00
+10 format key: 01 00 01
+PREHOOK: query: drop table dest1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@dest1
+PREHOOK: Output: default@dest1
+POSTHOOK: query: drop table dest1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@dest1
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.key SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []
+PREHOOK: query: drop table binary_udf
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table binary_udf
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Lineage: dest1.key SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: dest1.value SCRIPT [(binay_udf)binay_udf.FieldSchema(name:key, type:binary, comment:null), (binay_udf)binay_udf.FieldSchema(name:value, type:int, comment:null), ]
+POSTHOOK: Lineage: timestamp_udf.t EXPRESSION []