Posted to commits@hive.apache.org by th...@apache.org on 2017/05/11 16:13:50 UTC

hive git commit: HIVE-16584 : Warning messages should use LogHelper.printInfo instead of printing to the infoStream directly (Peter Vary via Thejas Nair)

Repository: hive
Updated Branches:
  refs/heads/master 91c4fa975 -> b48ec4042


HIVE-16584 : Warning messages should use LogHelper.printInfo instead of printing to the infoStream directly (Peter Vary via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b48ec404
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b48ec404
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b48ec404

Branch: refs/heads/master
Commit: b48ec4042feda15ebc0472726631bace8dab2932
Parents: 91c4fa9
Author: Peter Vary <pv...@cloudera.com>
Authored: Thu May 11 09:13:44 2017 -0700
Committer: Thejas M Nair <th...@hortonworks.com>
Committed: Thu May 11 09:13:44 2017 -0700

----------------------------------------------------------------------
 .../test/resources/testconfiguration.properties |   4 +-
 .../hive/ql/log/LogDivertAppenderForTest.java   |   5 +-
 .../ql/optimizer/calcite/RelOptHiveTable.java   |   2 +-
 .../optimizer/physical/CrossProductCheck.java   |   3 +-
 .../physical/SparkCrossProductCheck.java        |   3 +-
 .../hadoop/hive/ql/session/SessionState.java    |  89 +++++++++++++-
 .../ql/udf/generic/GenericUDFUnixTimeStamp.java |   9 +-
 .../clientpositive/beeline/mapjoin2.q.out       |  91 +++++++++++++++
 .../beeline/udf_unix_timestamp.q.out            | 117 +++++++++++++++++++
 9 files changed, 302 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
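
For readers skimming the hunks below: the substance of the change is one call-site pattern. Warning emitters stop printing to the raw info stream (which never reaches the operation log that BeeLine reads) and go through LogHelper.printInfo instead, passing isSilent=false so the warning still appears even in silent sessions. A minimal sketch of the before/after, condensed from the CrossProductCheck and SparkCrossProductCheck hunks (the surrounding class here is illustrative only, not part of the patch):

    import org.apache.hadoop.hive.ql.session.SessionState;

    public class WarnPatternSketch {
      // Old pattern: writes to the info stream only, so the message is
      // never logged and never lands in the operation log BeeLine shows.
      private void warnOld(String msg) {
        SessionState.getConsole().getInfoStream().println(
            String.format("Warning: %s", msg));
      }

      // New pattern: printInfo(msg, false) prints to the info stream
      // regardless of the silent flag and also writes the message to the
      // log file, so it reaches the operation log as well.
      private void warnNew(String msg) {
        SessionState.getConsole().printInfo("Warning: " + msg, false);
      }
    }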


http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index 5ab3076..7510ddc 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -753,6 +753,7 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
 
 beeline.positive.include=drop_with_concurrency.q,\
   escape_comments.q,\
+  mapjoin2.q,\
   smb_mapjoin_1.q,\
   smb_mapjoin_10.q,\
   smb_mapjoin_11.q,\
@@ -762,7 +763,8 @@ beeline.positive.include=drop_with_concurrency.q,\
   smb_mapjoin_2.q,\
   smb_mapjoin_3.q,\
   smb_mapjoin_7.q,\
-  select_dummy_source.q
+  select_dummy_source.q,\
+  udf_unix_timestamp.q
 
 minimr.query.negative.files=cluster_tasklog_retrieval.q,\
   file_with_header_footer_negative.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java b/ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java
index 966c264..465844d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/log/LogDivertAppenderForTest.java
@@ -54,7 +54,10 @@ public final class LogDivertAppenderForTest {
     public Result filter(LogEvent event) {
       if (event.getLevel().equals(Level.INFO) && "SessionState".equals(event.getLoggerName())) {
         if (event.getMessage().getFormattedMessage().startsWith("PREHOOK:")
-            || event.getMessage().getFormattedMessage().startsWith("POSTHOOK:")) {
+            || event.getMessage().getFormattedMessage().startsWith("POSTHOOK:")
+            || event.getMessage().getFormattedMessage().startsWith("unix_timestamp(void)")
+            || event.getMessage().getFormattedMessage().startsWith("Warning: ")
+            ) {
           return Result.ACCEPT;
         }
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
index 9faccd7..1d49568 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/RelOptHiveTable.java
@@ -396,7 +396,7 @@ public class RelOptHiveTable extends RelOptAbstractTable {
         HiveConf conf = SessionState.getSessionConf();
         if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_SHOW_WARNINGS)) {
           LogHelper console = SessionState.getConsole();
-          console.printInfoNoLog(logMsg);
+          console.printInfo(logMsg);
         }
       } else {
         LOG.error(logMsg);

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java
index 9ad33fd..f5abaf1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/CrossProductCheck.java
@@ -128,8 +128,7 @@ public class CrossProductCheck implements PhysicalPlanResolver, Dispatcher {
   }
 
   private void warn(String msg) {
-    SessionState.getConsole().getInfoStream().println(
-        String.format("Warning: %s", msg));
+    SessionState.getConsole().printInfo("Warning: " + msg, false);
   }
 
   private void checkMapJoins(MapRedTask mrTsk) throws SemanticException {

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
index 92d2191..3e8727c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SparkCrossProductCheck.java
@@ -85,8 +85,7 @@ public class SparkCrossProductCheck implements PhysicalPlanResolver, Dispatcher
   }
 
   private void warn(String msg) {
-    SessionState.getConsole().getInfoStream().println(
-        String.format("Warning: %s", msg));
+    SessionState.getConsole().printInfo("Warning: " + msg, false);
   }
 
   private void checkShuffleJoin(SparkWork sparkWork) throws SemanticException {

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 7692512..479a938 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -1068,57 +1068,126 @@ public class SessionState {
       this.isSilent = isSilent;
     }
 
+    /**
+     * Get the console output stream for HiveServer2 or HiveCli.
+     * @return The output stream
+     */
     public PrintStream getOutStream() {
       SessionState ss = SessionState.get();
       return ((ss != null) && (ss.out != null)) ? ss.out : System.out;
     }
 
+    /**
+     * Get the console info stream for HiveServer2 or HiveCli.
+     * @return The info stream
+     */
     public static PrintStream getInfoStream() {
       SessionState ss = SessionState.get();
       return ((ss != null) && (ss.info != null)) ? ss.info : getErrStream();
     }
 
+    /**
+     * Get the console error stream for HiveServer2 or HiveCli.
+     * @return The error stream
+     */
     public static PrintStream getErrStream() {
       SessionState ss = SessionState.get();
       return ((ss != null) && (ss.err != null)) ? ss.err : System.err;
     }
 
+    /**
+     * Get the child process output stream for HiveServer2 or HiveCli.
+     * @return The child process output stream
+     */
     public PrintStream getChildOutStream() {
       SessionState ss = SessionState.get();
       return ((ss != null) && (ss.childOut != null)) ? ss.childOut : System.out;
     }
 
+    /**
+     * Get the child process error stream for HiveServer2 or HiveCli.
+     * @return The child process error stream
+     */
     public PrintStream getChildErrStream() {
       SessionState ss = SessionState.get();
       return ((ss != null) && (ss.childErr != null)) ? ss.childErr : System.err;
     }
 
+    /**
+     * Whether logging to the info stream is enabled or not.
+     * @return True if logging to the HiveServer2 or HiveCli info stream is disabled
+     */
     public boolean getIsSilent() {
       SessionState ss = SessionState.get();
       // use the session or the one supplied in constructor
       return (ss != null) ? ss.getIsSilent() : isSilent;
     }
 
+    /**
+     * Logs into the log file.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     */
     public void logInfo(String info) {
       logInfo(info, null);
     }
 
+    /**
+     * Logs into the log file. Handles an extra detail which will not be printed if null.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     * @param detail Extra detail to log which will not be printed if null
+     */
     public void logInfo(String info, String detail) {
       LOG.info(info + StringUtils.defaultString(detail));
     }
 
+    /**
+     * Logs info into the log file, and if the LogHelper is not silent then into the HiveServer2 or
+     * HiveCli info stream too.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     */
     public void printInfo(String info) {
       printInfo(info, null);
     }
 
+    /**
+     * Logs info into the log file, and if not silent then into the HiveServer2 or HiveCli info
+     * stream too. The isSilent parameter is used instead of the LogHelper isSilent attribute.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     * @param isSilent If true then the message will not be printed to the info stream
+     */
     public void printInfo(String info, boolean isSilent) {
       printInfo(info, null, isSilent);
     }
 
+    /**
+     * Logs info into the log file, and if the LogHelper is not silent then into the HiveServer2 or
+     * HiveCli info stream too. Handles an extra detail which will not be printed if null.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     * @param detail Extra detail to log which will not be printed if null
+     */
     public void printInfo(String info, String detail) {
       printInfo(info, detail, getIsSilent());
     }
 
+    /**
+     * Logs info into the log file, and if not silent then into the HiveServer2 or HiveCli info
+     * stream too. Handles an extra detail which will not be printed if null.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param info The log message
+     * @param detail Extra detail to log which will not be printed if null
+     * @param isSilent If true then the message will not be printed to the info stream
+     */
     public void printInfo(String info, String detail, boolean isSilent) {
       if (!isSilent) {
         getInfoStream().println(info);
@@ -1126,16 +1195,24 @@ public class SessionState {
       LOG.info(info + StringUtils.defaultString(detail));
     }
 
-    public void printInfoNoLog(String info) {
-      if (!getIsSilent()) {
-        getInfoStream().println(info);
-      }
-    }
-
+    /**
+     * Logs an error into the log file, and into the HiveServer2 or HiveCli error stream too.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param error The log message
+     */
     public void printError(String error) {
       printError(error, null);
     }
 
+    /**
+     * Logs an error into the log file, and into the HiveServer2 or HiveCli error stream too.
+     * Handles an extra detail which will not be printed if null.
+     * BeeLine uses the operation log file to show the logs to the user, so depending on the
+     * BeeLine settings it could be shown to the user.
+     * @param error The log message
+     * @param detail Extra detail to log which will not be printed if null
+     */
     public void printError(String error, String detail) {
       getErrStream().println(error);
       LOG.error(error + StringUtils.defaultString(detail));
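
The SessionState hunk above is the core of the patch: it documents the LogHelper stream accessors, adds a printInfo(String, boolean) overload that lets a caller override the session's silent flag, and drops printInfoNoLog (its caller in RelOptHiveTable now uses printInfo, so the message is also written to the log file). A short usage sketch under those assumptions; the messages and the surrounding class are illustrative, not taken from the patch:

    import org.apache.hadoop.hive.ql.session.SessionState;
    import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

    public class PrintInfoUsageSketch {
      public void emit() {
        LogHelper console = SessionState.getConsole();

        // Honors the session's silent setting: printed to the info stream
        // only when the session is not silent, always written via LOG.info.
        console.printInfo("CBO simplified the plan");

        // Explicit isSilent=false: always printed to the info stream, which
        // is how the cross-product warnings above stay visible to the user.
        console.printInfo("Warning: cross product detected", false);
      }
    }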

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
index 118acdc..aaa1bd4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUnixTimeStamp.java
@@ -18,15 +18,12 @@
 
 package org.apache.hadoop.hive.ql.udf.generic;
 
-import java.io.PrintStream;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.LongWritable;
@@ -49,11 +46,7 @@ public class GenericUDFUnixTimeStamp extends GenericUDFToUnixTimeStamp {
         currentTimestamp = new LongWritable(0);
         setValueFromTs(currentTimestamp, SessionState.get().getQueryCurrentTimestamp());
         String msg = "unix_timestamp(void) is deprecated. Use current_timestamp instead.";
-        LOG.warn(msg);
-        PrintStream stream = LogHelper.getInfoStream();
-        if (stream != null) {
-          stream.println(msg);
-        }
+        SessionState.getConsole().printInfo(msg, false);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out b/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
new file mode 100644
index 0000000..08e398a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
@@ -0,0 +1,91 @@
+PREHOOK: query: create table tbl (n bigint, t string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tbl
+POSTHOOK: query: create table tbl (n bigint, t string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tbl
+PREHOOK: query: insert into tbl values (1, 'one')
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tbl
+POSTHOOK: query: insert into tbl values (1, 'one')
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tbl
+POSTHOOK: Lineage: tbl.n EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: tbl.t SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+PREHOOK: query: insert into tbl values(2, 'two')
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tbl
+POSTHOOK: query: insert into tbl values(2, 'two')
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tbl
+POSTHOOK: Lineage: tbl.n EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ]
+POSTHOOK: Lineage: tbl.t SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ]
+Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl where n = 1) a  left outer join  (select * from tbl where 1 = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl
+#### A masked pattern was here ####
+POSTHOOK: query: select a.n, a.t, isnull(b.n), isnull(b.t) from (select * from tbl where n = 1) a  left outer join  (select * from tbl where 1 = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl
+#### A masked pattern was here ####
+1	one	true	true
+Warning: Map Join MAPJOIN[13][bigTable=?] in task 'Stage-3:MAPRED' is a cross product
+PREHOOK: query: select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl where 2 = 1) a  right outer join  (select * from tbl where n = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl
+#### A masked pattern was here ####
+POSTHOOK: query: select isnull(a.n), isnull(a.t), b.n, b.t from (select * from tbl where 2 = 1) a  right outer join  (select * from tbl where n = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl
+#### A masked pattern was here ####
+true	true	2	two
+Warning: Shuffle Join JOIN[8][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl where n = 1) a  full outer join  (select * from tbl where n = 2) b on a.n = b.n
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tbl
+#### A masked pattern was here ####
+POSTHOOK: query: select isnull(a.n), isnull(a.t), isnull(b.n), isnull(b.t) from (select * from tbl where n = 1) a  full outer join  (select * from tbl where n = 2) b on a.n = b.n
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tbl
+#### A masked pattern was here ####
+false	false	true	true
+true	true	false	false
+PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+11	1	1	0	0
+PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+11	1	1	0	0
+PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+11	1	1	0	0
+PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+POSTHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+#### A masked pattern was here ####
+11	1	1	0	0

http://git-wip-us.apache.org/repos/asf/hive/blob/b48ec404/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out b/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
new file mode 100644
index 0000000..fc0de7d
--- /dev/null
+++ b/ql/src/test/results/clientpositive/beeline/udf_unix_timestamp.q.out
@@ -0,0 +1,117 @@
+PREHOOK: query: DESCRIBE FUNCTION unix_timestamp
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION unix_timestamp
+POSTHOOK: type: DESCFUNCTION
+unix_timestamp(date[, pattern]) - Converts the time to a number
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED unix_timestamp
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED unix_timestamp
+POSTHOOK: type: DESCFUNCTION
+unix_timestamp(date[, pattern]) - Converts the time to a number
+Converts the specified time to number of seconds since 1970-01-01. The unix_timestamp(void) overload is deprecated, use current_timestamp.
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp
+Function type:BUILTIN
+PREHOOK: query: create table oneline(key int, value string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@oneline
+POSTHOOK: query: create table oneline(key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@oneline
+PREHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@oneline
+POSTHOOK: query: load data local inpath '../../data/files/things.txt' into table oneline
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@oneline
+PREHOOK: query: SELECT
+  '2009-03-20 11:30:01',
+  unix_timestamp('2009-03-20 11:30:01')
+FROM oneline
+PREHOOK: type: QUERY
+PREHOOK: Input: default@oneline
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  '2009-03-20 11:30:01',
+  unix_timestamp('2009-03-20 11:30:01')
+FROM oneline
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@oneline
+#### A masked pattern was here ####
+2009-03-20 11:30:01	1237573801
+PREHOOK: query: SELECT
+  '2009-03-20',
+  unix_timestamp('2009-03-20', 'yyyy-MM-dd')
+FROM oneline
+PREHOOK: type: QUERY
+PREHOOK: Input: default@oneline
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  '2009-03-20',
+  unix_timestamp('2009-03-20', 'yyyy-MM-dd')
+FROM oneline
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@oneline
+#### A masked pattern was here ####
+2009-03-20	1237532400
+PREHOOK: query: SELECT
+  '2009 Mar 20 11:30:01 am',
+  unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
+FROM oneline
+PREHOOK: type: QUERY
+PREHOOK: Input: default@oneline
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  '2009 Mar 20 11:30:01 am',
+  unix_timestamp('2009 Mar 20 11:30:01 am', 'yyyy MMM dd h:mm:ss a')
+FROM oneline
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@oneline
+#### A masked pattern was here ####
+2009 Mar 20 11:30:01 am	1237573801
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+unix_timestamp(void) is deprecated. Use current_timestamp instead.
+PREHOOK: query: create table foo as SELECT
+  'deprecated' as a,
+  unix_timestamp() as b
+FROM oneline
+PREHOOK: type: CREATETABLE_AS_SELECT
+PREHOOK: Input: default@oneline
+PREHOOK: Output: database:default
+PREHOOK: Output: default@foo
+POSTHOOK: query: create table foo as SELECT
+  'deprecated' as a,
+  unix_timestamp() as b
+FROM oneline
+POSTHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: Input: default@oneline
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@foo
+POSTHOOK: Lineage: foo.a SIMPLE []
+POSTHOOK: Lineage: foo.b SIMPLE []
+PREHOOK: query: drop table foo
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@foo
+PREHOOK: Output: default@foo
+POSTHOOK: query: drop table foo
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@foo
+POSTHOOK: Output: default@foo
+PREHOOK: query: SELECT
+  'random_string',
+  unix_timestamp('random_string')
+FROM oneline
+PREHOOK: type: QUERY
+PREHOOK: Input: default@oneline
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+  'random_string',
+  unix_timestamp('random_string')
+FROM oneline
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@oneline
+#### A masked pattern was here ####
+random_string	NULL